Add internal server test

This commit is contained in:
extremeheat 2021-03-24 08:30:24 -04:00
commit bb9b94fa02
4 changed files with 274 additions and 16 deletions

View file

@ -10,6 +10,7 @@
"pretest": "npm run lint",
"lint": "standard",
"vanillaServer": "node tools/startVanillaServer.js",
"dumpPackets": "node tools/genPacketDumps.js",
"fix": "standard --fix"
},
"keywords": [
@ -33,7 +34,7 @@
"node-fetch": "^2.6.1",
"prismarine-nbt": "^1.5.0",
"protodef": "^1.11.0",
"raknet-native": "^0.1.0",
"raknet-native": "^0.2.0",
"uuid-1345": "^1.0.2"
},
"devDependencies": {

204
test/internal.js Normal file
View file

@ -0,0 +1,204 @@
// Enable protocol and raknet debug logging for the duration of the test run.
process.env.DEBUG = 'minecraft-protocol raknet'
const { Server, Client } = require('../')
const { dumpPackets, hasDumps } = require('../tools/genPacketDumps')
const DataProvider = require('../data/provider')
// First we need to dump some packets that a vanilla server would send a vanilla
// client. Then we can replay those back in our custom server.
// Ensure sample packet dumps exist for `version`; kicks off a dump run
// (returns its promise) only when none are present yet.
function prepare (version) {
  if (hasDumps(version)) return
  return dumpPackets(version)
}
// Spins up a local Server that replays previously dumped vanilla packets to a
// connecting offline Client, and reports success once the client spawns.
// `version` - protocol version under test (defaults to '1.16.210')
// `ok`      - optional callback invoked after a successful spawn round-trip
async function startTest (version = '1.16.210', ok) {
  await prepare(version)
  const Item = require('../types/Item')(version)
  const port = 19130
  const server = new Server({ hostname: '0.0.0.0', port, version })
  // Resolve a path inside this protocol version's data directory.
  function getPath (packetPath) {
    return DataProvider(server.options.protocolVersion).getPath(packetPath)
  }
  // Load a previously dumped sample packet (JSON) for this version.
  function get (packetPath) {
    return require(getPath('sample/' + packetPath))
  }
  server.listen()
  console.log('Started server')
  const respawnPacket = get('packets/respawn.json')
  const chunks = await requestChunks(respawnPacket.x, respawnPacket.z, 1)
  let loop
  // server logic
  server.on('connect', client => {
    client.on('join', () => {
      console.log('Client joined', client.getData())
      client.write('resource_packs_info', {
        must_accept: false,
        has_scripts: false,
        behaviour_packs: [],
        texture_packs: []
      })
      client.once('resource_pack_client_response', async rp => {
        // Tell the server we will compress everything (>=1 byte)
        client.write('network_settings', { compression_threshold: 1 })
        // Send some inventory slots
        for (let i = 0; i < 3; i++) {
          client.queue('inventory_slot', { window_id: 'armor', slot: 0, item: new Item().toBedrock() })
        }
        // client.queue('inventory_transaction', get('packets/inventory_transaction.json'))
        client.queue('player_list', get('packets/player_list.json'))
        client.queue('start_game', get('packets/start_game.json'))
        client.queue('item_component', { entries: [] })
        client.queue('set_spawn_position', get('packets/set_spawn_position.json'))
        client.queue('set_time', { time: 5433771 })
        client.queue('set_difficulty', { difficulty: 1 })
        client.queue('set_commands_enabled', { enabled: true })
        client.queue('adventure_settings', get('packets/adventure_settings.json'))
        client.queue('biome_definition_list', get('packets/biome_definition_list.json'))
        client.queue('available_entity_identifiers', get('packets/available_entity_identifiers.json'))
        client.queue('update_attributes', get('packets/update_attributes.json'))
        client.queue('creative_content', get('packets/creative_content.json'))
        client.queue('inventory_content', get('packets/inventory_content.json'))
        client.queue('player_hotbar', { selected_slot: 3, window_id: 'inventory', select_slot: true })
        client.queue('crafting_data', get('packets/crafting_data.json'))
        client.queue('available_commands', get('packets/available_commands.json'))
        client.queue('chunk_radius_update', { chunk_radius: 5 })
        // client.queue('set_entity_data', get('packets/set_entity_data.json'))
        client.queue('game_rules_changed', get('packets/game_rules_changed.json'))
        // FIX: previously replayed game_rules_changed.json here; the respawn
        // packet must come from its own dump (loaded above as respawnPacket).
        client.queue('respawn', get('packets/respawn.json'))
        for (const chunk of chunks) {
          client.queue('level_chunk', chunk)
        }
        // Periodically nudge the client with the chunk publisher position.
        loop = setInterval(() => {
          client.write('network_chunk_publisher_update', { coordinates: { x: 646, y: 130, z: 77 }, radius: 64 })
        }, 9500)
        // Give the client a few seconds to digest the world, then spawn it.
        setTimeout(() => {
          client.write('play_status', { status: 'player_spawn' })
        }, 6000)
        // Respond to tick synchronization packets
        client.on('tick_sync', (packet) => {
          client.queue('tick_sync', {
            request_time: packet.request_time,
            response_time: BigInt(Date.now())
          })
        })
      })
    })
  })
  // client logic
  const client = new Client({
    hostname: '127.0.0.1',
    port,
    username: 'Notch',
    version,
    offline: true
  })
  console.log('Started client')
  client.once('resource_packs_info', (packet) => {
    client.write('resource_pack_client_response', {
      response_status: 'completed',
      resourcepackids: []
    })
    client.once('resource_pack_stack', (stack) => {
      client.write('resource_pack_client_response', {
        response_status: 'completed',
        resourcepackids: []
      })
    })
    client.queue('client_cache_status', { enabled: false })
    client.queue('request_chunk_radius', { chunk_radius: 1 })
    client.queue('tick_sync', { request_time: BigInt(Date.now()), response_time: 0n })
  })
  client.once('spawn', () => {
    // FIX: typo 'spawend' -> 'spawned'
    console.info('Client spawned!')
    // Shut everything down shortly after a successful spawn and signal success.
    setTimeout(() => {
      client.close()
      server.close()
      ok?.()
    }, 500)
    clearInterval(loop)
  })
}
const { ChunkColumn, Version } = require('bedrock-provider')
const { waitFor } = require('../src/datatypes/util')
const mcData = require('minecraft-data')('1.16')
// Builds `level_chunk` payloads for a square of chunks of the given chunk
// `radius` around block position (x, z). Every column is filled with stone
// from y=0..59 and network-encoded without the blob cache.
async function requestChunks (x, z, radius) {
  const cxStart = (x >> 4) - radius
  const cxEnd = (x >> 4) + radius
  const czStart = (z >> 4) - radius
  const czEnd = (z >> 4) + radius
  const stone = mcData.blocksByName.stone
  const chunks = []
  for (let cx = cxStart; cx < cxEnd; cx++) {
    for (let cz = czStart; cz < czEnd; cz++) {
      console.log('reading chunk at ', cx, cz)
      // FIX: the column belongs at the chunk coordinates (cx, cz) being
      // emitted below, not at the block coordinates (x, z) of the spawn point.
      const cc = new ChunkColumn(Version.v1_2_0_bis, cx, cz)
      // Block coordinates renamed bx/by/bz so they no longer shadow x/z.
      for (let bx = 0; bx < 16; bx++) {
        for (let by = 0; by < 60; by++) {
          for (let bz = 0; bz < 16; bz++) {
            cc.setBlock(bx, by, bz, stone)
          }
        }
      }
      // (removed dead `if (!cc)` guard: `new` never yields a falsy value)
      const cbuf = await cc.networkEncodeNoCache()
      chunks.push({
        x: cx,
        z: cz,
        sub_chunk_count: cc.sectionsLen,
        cache_enabled: false,
        blobs: [],
        payload: cbuf
      })
    }
  }
  return chunks
}
// Runs startTest with a one-minute deadline: resolves (and logs a check mark)
// when the client spawns, otherwise throws 'timed out'.
async function timedTest (version) {
  await waitFor((res) => {
    startTest(version, res)
  }, 1000 * 60, () => {
    // FIX: construct the error with `new` per convention
    throw new Error('timed out')
  })
  console.info('✔ ok')
}
// Run the full timed test when executed directly (not require()'d).
if (!module.parent) timedTest()
module.exports = { startTest, timedTest, requestChunks }

View file

@ -1,20 +1,27 @@
/* eslint-disable */
// Collect sample packets needed for `serverTest.js`
// process.env.DEBUG = 'minecraft-protocol'
const fs = require('fs')
const vanillaServer = require('../tools/startVanillaServer')
const { Client } = require('../src/client')
const { serialize, waitFor } = require('../src/datatypes/util')
const { serialize, waitFor, getFiles } = require('../src/datatypes/util')
const { CURRENT_VERSION } = require('../src/options')
const { join } = require('path')
// True when at least 10 sample packet dumps already exist for `version`.
function hasDumps (version) {
  const root = join(__dirname, `../data/${version}/sample/packets/`)
  return fs.existsSync(root) && getFiles(root).length >= 10
}
let loop
async function main() {
async function dump (version, force) {
const random = ((Math.random() * 100) | 0)
const port = 19130 + random
const handle = await vanillaServer.startServerAndWait(CURRENT_VERSION, 1000 * 120, { 'server-port': port, path: 'bds_' })
const handle = await vanillaServer.startServerAndWait(version || CURRENT_VERSION, 1000 * 120, { 'server-port': port, path: 'bds_' })
console.log('Started server')
const client = new Client({
@ -25,9 +32,10 @@ async function main() {
})
return waitFor(async res => {
const root = join(__dirname, `../data/${client.options.version}/sample/packets/`)
if (!fs.existsSync(root)) {
fs.mkdirSync(root, { recursive: true })
const root = join(__dirname, `../data/${client.options.version}/sample/`)
if (!fs.existsSync(root + 'packets') || !fs.existsSync(root + 'chunks')) {
fs.mkdirSync(root + 'packets', { recursive: true })
fs.mkdirSync(root + 'chunks', { recursive: true })
}
client.once('resource_packs_info', (packet) => {
@ -43,7 +51,6 @@ async function main() {
})
})
client.queue('client_cache_status', { enabled: false })
client.queue('request_chunk_radius', { chunk_radius: 1 })
// client.queue('tick_sync', { request_time: BigInt(Date.now()), response_time: 0n })
@ -54,11 +61,17 @@ async function main() {
}, 200)
})
client.on('packet', pakData => { // Packet dumping
if (pakData.name == 'level_chunk') return
let i = 0
client.on('packet', async packet => { // Packet dumping
const { name, params } = packet.data
if (name === 'level_chunk') {
fs.writeFileSync(root + `chunks/${name}-${i++}.bin`, packet.buffer)
return
}
try {
if (!fs.existsSync(root + `${pakData.name}.json`)) {
fs.promises.writeFile(root + `${pakData.name}.json`, serialize(pakData.params, 2))
if (!fs.existsSync(root + `packets/${name}.json`) || force) {
fs.writeFileSync(root + `packets/${name}.json`, serialize(params, 2))
}
} catch (e) { console.log(e) }
})
@ -78,6 +91,9 @@ async function main() {
})
}
main().then(() => {
console.log('Successfully dumped packets')
})
if (!module.parent) {
dump(null, true).then(() => {
console.log('Successfully dumped packets')
})
}
module.exports = { dumpPackets: dump, hasDumps }

37
types/Item.js Normal file
View file

@ -0,0 +1,37 @@
// Factory returning a version-bound Item class that converts between this
// library's internal item shape and the Bedrock network wire format.
module.exports = (version) =>
  class Item {
    nbt
    constructor (obj) {
      // Defaults first, then caller-supplied fields, then pin the version.
      const defaults = { networkId: 0, runtimeId: 0, count: 0, metadata: 0 }
      Object.assign(this, defaults, obj)
      this.version = version
    }

    // Build an Item from a decoded Bedrock network structure. The packed
    // auxiliary_value holds the count in the low byte and metadata above it.
    static fromBedrock (obj) {
      const net = obj.item
      return new Item({
        runtimeId: obj.runtime_id,
        networkId: net?.network_id,
        count: net?.auxiliary_value & 0xff,
        metadata: net?.auxiliary_value >> 8,
        nbt: net?.nbt?.nbt
      })
    }

    // Serialize this item back into the Bedrock network structure.
    toBedrock () {
      const auxiliaryValue = (this.metadata << 8) | (this.count & 0xff)
      return {
        runtime_id: this.runtimeId,
        item: {
          network_id: this.networkId,
          auxiliary_value: auxiliaryValue,
          has_nbt: Boolean(this.nbt),
          nbt: { version: 1, nbt: this.nbt },
          can_place_on: [],
          can_destroy: [],
          blocking_tick: 0
        }
      }
    }
  }