add example chunk, use protodef to implement read and write batch, try to make the chunk sending work

Romain Beaumont 2016-04-07 00:46:40 +02:00
commit 863ddf1785
5 changed files with 69 additions and 23 deletions

examples/chunk (new binary file, contents not shown)

examples/server.js

@@ -1,6 +1,7 @@
'use strict';
var pmp = require('../');
+var fs = require("fs");
if(process.argv.length !=4) {
console.log("Usage: node server.js <host> <port>");
@@ -44,15 +45,18 @@ server.on('connection', function(client) {
});
client.on("mcpe_request_chunk_radius",packet => {
-const chunkRadius=packet.chunk_radius;
+const chunkRadius = packet.chunk_radius;
// TODO : to fix, no idea what to send
-client.writeMCPE("mcpe_full_chunk_data",{
-  chunk_x:0,
-  chunk_z:0,
-  order:0,
-  chunk_data_length:8,
-  chunk_data:new Buffer([0,1])
-});
+for (let x = 5; x < 6; x++) {
+  for (let z = 2; z < 3; z++) {
+    client.writeBatch([{"name":"mcpe","params":{name:"mcpe_full_chunk_data",params:{
+      chunk_x: x,
+      chunk_z: z,
+      order: 1,
+      chunk_data:fs.readFileSync(__dirname+"/chunk")
+    }}}]);
+  }
+}
});
client.on('error', function(err) {

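(The example's writeBatch call wraps each MCPE packet twice: the outer name/params pair selects the raknet "mcpe" packet, the inner pair names the MCPE packet itself. A helper makes the envelope explicit; mcpePacket below is illustrative only, not part of this commit.)

// Hypothetical helper: builds the nested envelope client.writeBatch expects.
function mcpePacket(name, params) {
  return {"name": "mcpe", "params": {name: name, params: params}};
}

// Same chunk send as above, with the wrapping factored out:
client.writeBatch([mcpePacket("mcpe_full_chunk_data", {
  chunk_x: 5,
  chunk_z: 2,
  order: 1,
  chunk_data: fs.readFileSync(__dirname + "/chunk")
})]);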
client.js

@@ -4,6 +4,10 @@ const raknet = require('raknet');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
+const ProtoDef = require('protodef').ProtoDef;
+const batchProto=new ProtoDef();
+batchProto.addTypes(require("./datatypes/minecraft"));
+batchProto.addType("insideBatch",["endOfArray",{"type":["buffer",{"countType":"i32"}]}]);
function createClient(options) {
assert.ok(options, 'options is required');
@@ -41,24 +45,14 @@ function createClient(options) {
texture: fs.readFileSync(path.join(__dirname,'texture'))
}
}
-)
+);
});
client.on('mcpe_batch', function(packet) {
var buf = zlib.inflateSync(packet.payload);
-var offset = 0;
-var length = buf.length;
-while(offset < length) {
-  var pkLength = buf.readInt32BE(offset);
-  offset += 4;
-  var packetBuffer = buf.slice(offset, pkLength);
-  offset += pkLength;
-  packetBuffer = Buffer.concat([new Buffer([0x8e]),packetBuffer]);
-  client.readEncapsulatedPacket(packetBuffer);
-}
+var packets=batchProto.parsePacketBuffer("insideBatch",buf).data;
+packets.forEach(packet => client.readEncapsulatedPacket(Buffer.concat([new Buffer([0x8e]),packet])));
});
return client;

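(The removed loop also had a slicing bug: buf.slice(offset, pkLength) takes pkLength as an absolute end offset, not a length; the ProtoDef route sidesteps that entirely. To inspect a captured mcpe_batch payload outside the event handler, the same two steps apply. A standalone sketch, reusing the batchProto defined at the top of this file:)

// Split a raw mcpe_batch payload into one buffer per batched packet.
function splitBatch(payload) {
  var buf = zlib.inflateSync(payload);
  // "insideBatch" reads i32-length-prefixed buffers until buf is exhausted
  return batchProto.parsePacketBuffer("insideBatch", buf).data;
}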
server.js

@@ -1,4 +1,9 @@
const raknet = require('raknet');
const zlib = require('zlib');
+const ProtoDef = require('protodef').ProtoDef;
+const batchProto=new ProtoDef();
+batchProto.addTypes(require("./datatypes/minecraft"));
+batchProto.addType("insideBatch",["endOfArray",{"type":["buffer",{"countType":"i32"}]}]);
function createServer(options) {
options = options || {};
@@ -20,12 +25,21 @@ function createServer(options) {
server.on("connection", function (client) {
client.on("mcpe",packet => client.emit(packet.name,packet.params));
client.writeMCPE=(name,packet) => {
client.writeEncapsulated("mcpe",{
name:name,
params:packet
});
};
+client.writeBatch=function(packets) {
+  const payload=zlib.deflateSync(batchProto.createPacketBuffer("insideBatch",
+    packets.map(packet =>
+      client.encapsulatedPacketSerializer.createPacketBuffer(packet).slice(1))));
+  client.writeMCPE("mcpe_batch",{
+    payload:payload
+  });
+}
});
return server;
}

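(For reference, the payload writeBatch produces is nothing more than the deflate of each serialized packet, minus its leading 0x8e id byte (hence the slice(1)), prefixed by its i32 length. A hand-rolled equivalent of the batchProto call, as a sketch only:)

// Equivalent of batchProto.createPacketBuffer("insideBatch", bufs)
// followed by zlib.deflateSync, written out by hand.
function makeBatchPayload(packetBuffers) {
  var parts = [];
  packetBuffers.forEach(function(buf) {
    var len = new Buffer(4);
    len.writeInt32BE(buf.length, 0);  // i32 big-endian length prefix
    parts.push(len, buf);
  });
  return zlib.deflateSync(Buffer.concat(parts));
}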
datatypes/minecraft.js

@@ -93,9 +93,43 @@ function writeIpAddress(value, buffer, offset) {
return offset;
}
+function readEndOfArray(buffer, offset, typeArgs) {
+  var type=typeArgs.type;
+  var cursor = offset;
+  var elements = [];
+  while(cursor<buffer.length) {
+    var results = this.read(buffer, cursor, type, {});
+    elements.push(results.value);
+    cursor += results.size;
+  }
+  return {
+    value: elements,
+    size: cursor - offset
+  };
+}
+function writeEndOfArray(value, buffer, offset,typeArgs) {
+  var type=typeArgs.type;
+  var self = this;
+  value.forEach(function(item) {
+    offset = self.write(item, buffer, offset, type, {});
+  });
+  return offset;
+}
+function sizeOfEndOfArray(value, typeArgs) {
+  var type=typeArgs.type;
+  var size = 0;
+  for(var i = 0; i < value.length; ++i) {
+    size += this.sizeOf(value[i], type, {});
+  }
+  return size;
+}
module.exports = {
'uuid': [readUUID, writeUUID, 16],
'nbt': [readNbt, writeNbt, sizeOfNbt],
'entityMetadataLoop': [readEntityMetadata, writeEntityMetadata, sizeOfEntityMetadata],
-'ipAddress': [readIpAddress, writeIpAddress, 4]
+'ipAddress': [readIpAddress, writeIpAddress, 4],
+'endOfArray':[readEndOfArray,writeEndOfArray,sizeOfEndOfArray]
};
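(endOfArray is a counterless array type: it keeps reading elements of the parameterized type until the buffer runs out, which suits batch payloads since the format stores no packet count. A round-trip sketch through the new type; the require path assumes the caller sits next to the datatypes directory, as client.js does:)

var ProtoDef = require('protodef').ProtoDef;
var proto = new ProtoDef();
proto.addTypes(require('./datatypes/minecraft'));
proto.addType('insideBatch', ['endOfArray', {'type': ['buffer', {'countType': 'i32'}]}]);

var encoded = proto.createPacketBuffer('insideBatch', [new Buffer([1, 2]), new Buffer([3])]);
// encoded: 00 00 00 02 01 02 00 00 00 01 03 (each buffer i32-length-prefixed)
var decoded = proto.parsePacketBuffer('insideBatch', encoded).data;
// decoded: [<Buffer 01 02>, <Buffer 03>]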