Merge pull request #33 from extremeheat/new

1.16
This commit is contained in:
Romain Beaumont 2021-03-11 13:02:48 +01:00 committed by GitHub
commit 8a6158bc2d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
54 changed files with 24429 additions and 186 deletions

8
.gitignore vendored
View file

@@ -1,2 +1,8 @@
node_modules/
npm-debug.log
npm-debug.log
package-lock.json
__*
data/*.js
src/**/*.json
src/**/*.txt
dist/

3
AUTHORS.md Normal file
View file

@@ -0,0 +1,3 @@
mhsjlw <mhsjlw@aol.com> @mhsjlw
Romain Beaumont <romain.rom1@gmail.com> @rom1504
Filiph Sandström <filiph.sandstrom@filfatstudios.com> @filfat

View file

@@ -1,3 +1,7 @@
## 2.4.0
* Update to version 1.12.0
* Add option to provide protocol.json
## 2.2.3
* fix the use item packet

View file

@@ -7,7 +7,7 @@ Parse and serialize Minecraft: Pocket Edition packets
## Features
* Supports Minecraft PE `0.14.3`
* Supports Minecraft Pocket Edition `1.0`
* Pure JavaScript
* Easily send and listen for any packet
* RakNet support through [node-raknet](https://github.com/mhsjlw/node-raknet)
@@ -24,3 +24,7 @@ This project is run by these guys:
- [mhsjlw](https://github.com/mhsjlw)
- [rom1504](https://github.com/rom1504)
- [Filiph Sandström](https://github.com/filfat)
## License
Licensed under the MIT license.

BIN
data/biome_definitions.nbt Normal file

Binary file not shown.

4898
data/creativeitems.json Normal file

File diff suppressed because it is too large Load diff

60
data/new/compile.js Normal file
View file

@@ -0,0 +1,60 @@
/**
* This is a utility script that converts the YAML here into ProtoDef schema code and (soon) docs/typescript definitions.
* It also pre-compiles JS code from the schema for easier development.
*
* You can run this with `npm run build`
*
*/
const fs = require('fs')
const { ProtoDefCompiler } = require('protodef').Compiler
/**
 * Build packet_map.yml from proto.yml and compile proto.yml into a
 * ProtoDef protocol.json (written to the current directory).
 *
 * Scans the parsed YAML for `%container` entries named `packet_*`,
 * collects their numeric `!id`s, and emits the id->name switch plus the
 * name->container dispatch used by the `mcpe_packet` wrapper type.
 */
function genProtoSchema() {
  const { parse, compile } = require('protodef-yaml/compiler')
  // Create the packet_map.yml from proto.yml
  const parsed = parse('./proto.yml')
  // Each entry: [numeric packet id, short name, container name ("packet_" + short name)]
  const packets = []
  for (const key in parsed) {
    if (key.startsWith('%container')) {
      const [, name] = key.split(',')
      if (name.startsWith('packet_')) {
        const children = parsed[key]
        const packetName = name.replace('packet_', '')
        const packetID = children['!id']
        packets.push([packetID, packetName, name])
      }
    }
  }
  // FIX: the original `let l1 = l2 = ''` only declared l1 — the inner
  // assignment leaked `l2` as an implicit global (a ReferenceError under
  // strict mode). Declare both as locals.
  let l1 = ''
  let l2 = ''
  for (const [id, name, fname] of packets) {
    l1 += ` 0x${id.toString(16).padStart(2, '0')}: ${name}\n`
    l2 += ` if ${name}: ${fname}\n`
  }
  const t = `!import: types.yaml\nmcpe_packet:\n name: varint =>\n${l1}\n params: name ?\n${l2}`
  fs.writeFileSync('./packet_map.yml', t)
  compile('./proto.yml', 'protocol.json')
}
// Run the generation step, then wrap the compiled types under a `types`
// key as ../newproto.json and delete the intermediate protocol.json.
// Note: order matters — protocol.json must exist before the require()
// below and must not be removed until after it has been read.
genProtoSchema()
fs.writeFileSync('../newproto.json', JSON.stringify({ types: require('./protocol.json') }, null, 2))
fs.unlinkSync('./protocol.json') //remove temp file
/**
 * Pre-compile the ProtoDef schema in ../newproto.json into standalone
 * reader/writer/sizer modules (../read.js, ../write.js, ../size.js)
 * and return the compiled protocol object.
 */
function createProtocol() {
  const pdc = new ProtoDefCompiler()
  const schemaTypes = require('../newproto.json').types
  pdc.addTypes(require('../../src/datatypes/compiler-minecraft'))
  pdc.addTypes(require('prismarine-nbt/compiler-zigzag'))
  pdc.addTypesToCompile(schemaTypes)
  // Emit each generated module as `module.exports = <generated source>`.
  const outputs = [
    ['../read.js', pdc.readCompiler],
    ['../write.js', pdc.writeCompiler],
    ['../size.js', pdc.sizeOfCompiler]
  ]
  for (const [path, generator] of outputs) {
    fs.writeFileSync(path, 'module.exports = ' + generator.generate())
  }
  return pdc.compileProtoDefSync()
}
// Entry point: pre-compile the read/write/size JS from the schema built above.
console.log('Generating JS...')
createProtocol()

317
data/new/packet_map.yml Normal file
View file

@@ -0,0 +1,317 @@
!import: types.yaml
mcpe_packet:
name: varint =>
0x01: login
0x02: play_status
0x03: server_to_client_handshake
0x04: client_to_server_handshake
0x05: disconnect
0x06: resource_packs_info
0x07: resource_pack_stack
0x08: resource_pack_client_response
0x09: text
0x0a: set_time
0x0b: start_game
0x0c: add_player
0x0d: add_entity
0x0e: remove_entity
0x0f: add_item_entity
0x11: take_item_entity
0x12: move_entity
0x13: move_player
0x14: rider_jump
0x15: update_block
0x16: add_painting
0x17: tick_sync
0x18: level_sound_event_old
0x19: level_event
0x1a: block_event
0x1b: entity_event
0x1c: mob_effect
0x1d: update_attributes
0x1e: inventory_transaction
0x1f: mob_equipment
0x20: mob_armor_equipment
0x21: interact
0x22: block_pick_request
0x23: entity_pick_request
0x24: player_action
0x26: hurt_armor
0x27: set_entity_data
0x28: set_entity_motion
0x29: set_entity_link
0x2a: set_health
0x2b: set_spawn_position
0x2c: animate
0x2d: respawn
0x2e: container_open
0x2f: container_close
0x30: player_hotbar
0x31: inventory_content
0x32: inventory_slot
0x33: container_set_data
0x34: crafting_data
0x35: crafting_event
0x36: gui_data_pick_item
0x37: adventure_settings
0x38: block_entity_data
0x39: player_input
0x3a: level_chunk
0x3b: set_commands_enabled
0x3c: set_difficulty
0x3d: change_dimension
0x3e: set_player_game_type
0x3f: player_list
0x40: simple_event
0x41: event
0x42: spawn_experience_orb
0x43: clientbound_map_item_data
0x44: map_info_request
0x45: request_chunk_radius
0x46: chunk_radius_update
0x47: item_frame_drop_item
0x48: game_rules_changed
0x49: camera
0x4a: boss_event
0x4b: show_credits
0x4c: available_commands
0x4d: command_request
0x4e: command_block_update
0x4f: command_output
0x50: update_trade
0x51: update_equipment
0x52: resource_pack_data_info
0x53: resource_pack_chunk_data
0x54: resource_pack_chunk_request
0x55: transfer
0x56: play_sound
0x57: stop_sound
0x58: set_title
0x59: add_behavior_tree
0x5a: structure_block_update
0x5b: show_store_offer
0x5c: purchase_receipt
0x5d: player_skin
0x5e: sub_client_login
0x5f: initiate_web_socket_connection
0x60: set_last_hurt_by
0x61: book_edit
0x62: npc_request
0x63: photo_transfer
0x64: modal_form_request
0x65: modal_form_response
0x66: server_settings_request
0x67: server_settings_response
0x68: show_profile
0x69: set_default_game_type
0x6a: remove_objective
0x6b: set_display_objective
0x6c: set_score
0x6d: lab_table
0x6e: update_block_synced
0x6f: move_entity_delta
0x70: set_scoreboard_identity
0x71: set_local_player_as_initialized
0x72: update_soft_enum
0x73: network_stack_latency
0x75: script_custom_event
0x76: spawn_particle_effect
0x77: available_entity_identifiers
0x78: level_sound_event_v2
0x79: network_chunk_publisher_update
0x7a: biome_definition_list
0x7b: level_sound_event
0x7c: level_event_generic
0x7d: lectern_update
0x7e: video_stream_connect
0x7f: add_ecs_entity
0x80: remove_ecs_entity
0x81: client_cache_status
0x82: on_screen_texture_animation
0x83: map_create_locked_copy
0x84: structure_template_data_export_request
0x85: structure_template_data_export_response
0x86: update_block_properties
0x87: client_cache_blob_status
0x88: client_cache_miss_response
0x89: education_settings
0x8b: multiplayer_settings
0x8c: settings_command
0x8d: anvil_damage
0x8e: completed_using_item
0x8f: network_settings
0x90: player_auth_input
0x91: creative_content
0x92: player_enchant_options
0x93: item_stack_request
0x94: item_stack_response
0x95: player_armor_damage
0x97: update_player_game_type
0x9a: position_tracking_db_request
0x99: position_tracking_db_broadcast
0x9c: packet_violation_warning
0x9d: motion_prediction_hints
0x9e: animate_entity
0x9f: camera_shake
0xa0: player_fog
0xa1: correct_player_move_prediction
0xa2: item_component
0xa3: filter_text_packet
params: name ?
if login: packet_login
if play_status: packet_play_status
if server_to_client_handshake: packet_server_to_client_handshake
if client_to_server_handshake: packet_client_to_server_handshake
if disconnect: packet_disconnect
if resource_packs_info: packet_resource_packs_info
if resource_pack_stack: packet_resource_pack_stack
if resource_pack_client_response: packet_resource_pack_client_response
if text: packet_text
if set_time: packet_set_time
if start_game: packet_start_game
if add_player: packet_add_player
if add_entity: packet_add_entity
if remove_entity: packet_remove_entity
if add_item_entity: packet_add_item_entity
if take_item_entity: packet_take_item_entity
if move_entity: packet_move_entity
if move_player: packet_move_player
if rider_jump: packet_rider_jump
if update_block: packet_update_block
if add_painting: packet_add_painting
if tick_sync: packet_tick_sync
if level_sound_event_old: packet_level_sound_event_old
if level_event: packet_level_event
if block_event: packet_block_event
if entity_event: packet_entity_event
if mob_effect: packet_mob_effect
if update_attributes: packet_update_attributes
if inventory_transaction: packet_inventory_transaction
if mob_equipment: packet_mob_equipment
if mob_armor_equipment: packet_mob_armor_equipment
if interact: packet_interact
if block_pick_request: packet_block_pick_request
if entity_pick_request: packet_entity_pick_request
if player_action: packet_player_action
if hurt_armor: packet_hurt_armor
if set_entity_data: packet_set_entity_data
if set_entity_motion: packet_set_entity_motion
if set_entity_link: packet_set_entity_link
if set_health: packet_set_health
if set_spawn_position: packet_set_spawn_position
if animate: packet_animate
if respawn: packet_respawn
if container_open: packet_container_open
if container_close: packet_container_close
if player_hotbar: packet_player_hotbar
if inventory_content: packet_inventory_content
if inventory_slot: packet_inventory_slot
if container_set_data: packet_container_set_data
if crafting_data: packet_crafting_data
if crafting_event: packet_crafting_event
if gui_data_pick_item: packet_gui_data_pick_item
if adventure_settings: packet_adventure_settings
if block_entity_data: packet_block_entity_data
if player_input: packet_player_input
if level_chunk: packet_level_chunk
if set_commands_enabled: packet_set_commands_enabled
if set_difficulty: packet_set_difficulty
if change_dimension: packet_change_dimension
if set_player_game_type: packet_set_player_game_type
if player_list: packet_player_list
if simple_event: packet_simple_event
if event: packet_event
if spawn_experience_orb: packet_spawn_experience_orb
if clientbound_map_item_data: packet_clientbound_map_item_data
if map_info_request: packet_map_info_request
if request_chunk_radius: packet_request_chunk_radius
if chunk_radius_update: packet_chunk_radius_update
if item_frame_drop_item: packet_item_frame_drop_item
if game_rules_changed: packet_game_rules_changed
if camera: packet_camera
if boss_event: packet_boss_event
if show_credits: packet_show_credits
if available_commands: packet_available_commands
if command_request: packet_command_request
if command_block_update: packet_command_block_update
if command_output: packet_command_output
if update_trade: packet_update_trade
if update_equipment: packet_update_equipment
if resource_pack_data_info: packet_resource_pack_data_info
if resource_pack_chunk_data: packet_resource_pack_chunk_data
if resource_pack_chunk_request: packet_resource_pack_chunk_request
if transfer: packet_transfer
if play_sound: packet_play_sound
if stop_sound: packet_stop_sound
if set_title: packet_set_title
if add_behavior_tree: packet_add_behavior_tree
if structure_block_update: packet_structure_block_update
if show_store_offer: packet_show_store_offer
if purchase_receipt: packet_purchase_receipt
if player_skin: packet_player_skin
if sub_client_login: packet_sub_client_login
if initiate_web_socket_connection: packet_initiate_web_socket_connection
if set_last_hurt_by: packet_set_last_hurt_by
if book_edit: packet_book_edit
if npc_request: packet_npc_request
if photo_transfer: packet_photo_transfer
if modal_form_request: packet_modal_form_request
if modal_form_response: packet_modal_form_response
if server_settings_request: packet_server_settings_request
if server_settings_response: packet_server_settings_response
if show_profile: packet_show_profile
if set_default_game_type: packet_set_default_game_type
if remove_objective: packet_remove_objective
if set_display_objective: packet_set_display_objective
if set_score: packet_set_score
if lab_table: packet_lab_table
if update_block_synced: packet_update_block_synced
if move_entity_delta: packet_move_entity_delta
if set_scoreboard_identity: packet_set_scoreboard_identity
if set_local_player_as_initialized: packet_set_local_player_as_initialized
if update_soft_enum: packet_update_soft_enum
if network_stack_latency: packet_network_stack_latency
if script_custom_event: packet_script_custom_event
if spawn_particle_effect: packet_spawn_particle_effect
if available_entity_identifiers: packet_available_entity_identifiers
if level_sound_event_v2: packet_level_sound_event_v2
if network_chunk_publisher_update: packet_network_chunk_publisher_update
if biome_definition_list: packet_biome_definition_list
if level_sound_event: packet_level_sound_event
if level_event_generic: packet_level_event_generic
if lectern_update: packet_lectern_update
if video_stream_connect: packet_video_stream_connect
if add_ecs_entity: packet_add_ecs_entity
if remove_ecs_entity: packet_remove_ecs_entity
if client_cache_status: packet_client_cache_status
if on_screen_texture_animation: packet_on_screen_texture_animation
if map_create_locked_copy: packet_map_create_locked_copy
if structure_template_data_export_request: packet_structure_template_data_export_request
if structure_template_data_export_response: packet_structure_template_data_export_response
if update_block_properties: packet_update_block_properties
if client_cache_blob_status: packet_client_cache_blob_status
if client_cache_miss_response: packet_client_cache_miss_response
if education_settings: packet_education_settings
if multiplayer_settings: packet_multiplayer_settings
if settings_command: packet_settings_command
if anvil_damage: packet_anvil_damage
if completed_using_item: packet_completed_using_item
if network_settings: packet_network_settings
if player_auth_input: packet_player_auth_input
if creative_content: packet_creative_content
if player_enchant_options: packet_player_enchant_options
if item_stack_request: packet_item_stack_request
if item_stack_response: packet_item_stack_response
if player_armor_damage: packet_player_armor_damage
if update_player_game_type: packet_update_player_game_type
if position_tracking_db_request: packet_position_tracking_db_request
if position_tracking_db_broadcast: packet_position_tracking_db_broadcast
if packet_violation_warning: packet_packet_violation_warning
if motion_prediction_hints: packet_motion_prediction_hints
if animate_entity: packet_animate_entity
if camera_shake: packet_camera_shake
if player_fog: packet_player_fog
if correct_player_move_prediction: packet_correct_player_move_prediction
if item_component: packet_item_component
if filter_text_packet: packet_filter_text_packet

2395
data/new/proto.yml Normal file

File diff suppressed because it is too large Load diff

923
data/new/types.yaml Normal file
View file

@@ -0,0 +1,923 @@
# !StartDocs: Types
BehaviourPackInfos: []li16
uuid: string
version: string
size: lu64
content_key: string
sub_pack_name: string
content_identity: string
has_scripts: bool
TexturePackInfos: []li16
uuid: string
version: string
size: lu64
content_key: string
sub_pack_name: string
content_identity: string
has_scripts: bool
rtx_enabled: bool
ResourcePackIdVersions: []varint
# The ID of the resource pack.
uuid: string
# The version of the resource pack.
version: string
# The subpack name of the resource pack.
name: string
ResourcePackIds: string[]li16
Experiment:
name: string
enabled: bool
Experiments: Experiment[]li32
GameMode: zigzag32 =>
0: survival
1: creative
2: adventure
3: survival_spectator
4: creative_spectator
5: fallback
GameRule:
name: string
type: varint =>
1: bool
2: int
3: float
value: type?
if bool: bool
if int: zigzag32
if float: lf32
GameRules: GameRule[]varint
# CacheBlob represents a blob as used in the client side blob cache protocol. It holds a hash of its data and
# the full data of it.
Blob:
# Hash is the hash of the blob. The hash is computed using xxHash, and must be deterministic for the same
# chunk data.
hash: lu64
# Payload is the data of the blob. When sent, the client will associate the Hash of the blob with the
# Payload in it.
payload: ByteArray
BlockPalette: []varint
name: string
state: nbt
Itemstates: []varint
name: string
runtime_id: li16
component_based: bool
Item:
network_id: zigzag32
_: network_id?
if 0: void
default:
auxiliary_value: zigzag32
has_nbt: lu16 =>
0xffff: 'true'
0x0000: 'false'
nbt: has_nbt?
if true:
version: u8
nbt: nbt
default: void
can_place_on: string[]zigzag32
can_destroy: string[]zigzag32
_: network_id?
if 355:
blocking_tick: zigzag64
vec3i:
x: zigzag32
y: zigzag32
z: zigzag32
vec3u:
x: varint
y: varint
z: varint
vec3f:
x: lf32
y: lf32
z: lf32
vec2f:
x: lf32
z: lf32
MetadataDictionary: []varint
# https://github.com/pmmp/PocketMine-MP/blob/stable/src/pocketmine/entity/Entity.php#L101
key: varint =>
0: index
1: health #int (minecart/boat)
2: variant #int
3: color #byte
4: nametag #string
5: owner_eid #long
6: target_eid #long
7: air #short
8: potion_color #int (ARGB!)
9: potion_ambient #byte
10: jump_duration #long
11: hurt_time #int (minecart/boat)
12: hurt_direction #int (minecart/boat)
13: paddle_time_left #float
14: paddle_time_right #float
15: experience_value #int (xp orb)
16: minecart_display_block #int (id | (data << 16))
17: minecart_display_offset #int
18: minecart_has_display #byte (must be 1 for minecart to show block inside)
20: old_swell
21: swell_dir
22: charge_amount
23: enderman_held_runtime_id #short
24: entity_age #short
26: player_flags
27: player_index
28: player_bed_position #block coords
29: fireball_power_x #float
30: fireball_power_y
31: fireball_power_z
32: aux_power
33: fish_x
34: fish_z
35: fish_angle
36: potion_aux_value #short
37: lead_holder_eid #long
38: scale
39: interactive_tag #string
40: npc_skin_id #string
41: url_tag #string
42: max_airdata_max_air
43: mark_variant #int
44: container_type #byte
45: container_base_size #int
46: container_extra_slots_per_strength #int
47: block_target
48: wither_invulnerable_ticks #int
49: wither_target_1 #long
50: wither_target_2 #long
51: wither_target_3 #long
52: aerial_attack
53: boundingbox_width
54: boundingbox_height
55: fuse_length
56: rider_seat_position #vector3f
57: rider_rotation_locked #byte
58: rider_max_rotation #float
59: rider_min_rotation #float
60: area_effect_cloud_radius #float
61: area_effect_cloud_waiting #int
62: area_effect_cloud_particle_id #int
63: shulker_peek_id #int
64: shulker_attach_face #byte
65: shulker_attached #short
66: shulker_attach_pos
67: trading_player_eid #long
68: trading_career
69: has_command_block
70: command_block_command #string
71: command_block_last_output #string
72: command_block_track_output #byte
73: controlling_rider_seat_number #byte
74: strength #int
75: max_strength #int
76: spell_casting_color #int
77: limited_life
78: armor_stand_pose_index # int
79: ender_crystal_time_offset # int
80: always_show_nametag # byte
81: color_2 # byte
82: name_author
83: score_tag #String
84: balloon_attached_entity # long
85: pufferfish_size
86: bubble_time
87: agent
88: sitting_amount
89: sitting_amount_previous
90: eating_counter
91: flags_extended
92: laying_amount
93: laying_amount_previous
94: duration
95: spawn_time
96: change_rate
97: change_on_pickup
98: pickup_count
99: interact_text
100: trade_tier
101: max_trade_tier
102: trade_experience
103: skin_id
104: spawning_frames
105: command_block_tick_delay
106: command_block_execute_on_first_tick
107: ambient_sound_interval
108: ambient_sound_interval_range
109: ambient_sound_event_name
110: fall_damage_multiplier
111: name_raw_text
112: can_ride_target
113: low_tier_cured_discount
114: high_tier_cured_discount
115: nearby_cured_discount
116: nearby_cured_discount_timestamp
117: hitbox
118: is_buoyant
119: buoyancy_data
type: varint =>
0: byte
1: short
2: int
3: float
4: string
5: compound
6: vec3i
7: long
8: vec3f
value: type?
if byte: i8
if short: li16
if int: zigzag32
if float: lf32
if string: string
if compound: nbt
if vec3i: vec3i
if long: zigzag64
if vec3f: vec3f
Link:
ridden_entity_id: zigzag64
rider_entity_id: zigzag64
type: u8
immediate: bool
rider_initiated: bool
Links: Link[]varint
EntityAttributes: []varint
name: string
min: lf32
value: lf32
max: lf32
Rotation:
yaw: byterot
pitch: byterot
head_yaw: byterot
BlockCoordinates: # mojang...
x: zigzag32
y: varint
z: zigzag32
PlayerAttributes: []varint
min: lf32
max: lf32
current: lf32
default: lf32
name: string
Transaction:
# LegacyRequestID is an ID that is only non-zero at times when sent by the client. The server should
# always send 0 for this. When this field is not 0, the LegacySetItemSlots slice below will have values
# in it.
# LegacyRequestID ties in with the ItemStackResponse packet. If this field is non-0, the server should
# respond with an ItemStackResponse packet. Some inventory actions such as dropping an item out of the
# hotbar are still done using this packet, and the ItemStackResponse packet needs to tie in with it.
legacy_request_id: zigzag32
# `legacy_transactions` are only present if the LegacyRequestID is non-zero. These item slots inform the
# server of the slots that were changed during the inventory transaction, and the server should send
# back an ItemStackResponse packet with these slots present in it. (Or false with no slots, if rejected.)
legacy_transactions: legacy_request_id?
if 0: void
default: []varint
container_id: u8
changed_slots: []varint
slot_id: u8
transaction_type: varint =>
0: normal
1: inventory_mismatch
2: item_use
3: item_use_on_entity
4: item_release
network_ids: bool
inventory_actions: []varint
source_type: varint =>
0: container
1: global
2: world_interaction
3: creative
100: craft_slot
99999: craft
_: source_type?
if container or creative:
inventory_id: varint
if world_interaction:
flags: varint
if craft or craft_slot:
action: varint
default: void
slot: varint
old_item: Item
new_item: Item
new_item_stack_id: ../network_ids?
if true: zigzag32
default: void
transaction_data: transaction_type?
if normal or inventory_mismatch: void
# UseItemTransactionData represents an inventory transaction data object sent when the client uses an item on
# a block.
if item_use:
# ActionType is the type of the UseItem inventory transaction. It is one of the action types found above,
# and specifies the way the player interacted with the block.
action_type: varint =>
0: click_block
1: click_air
2: break_block
# BlockPosition is the position of the block that was interacted with. This is only really a correct
# block position if ActionType is not UseItemActionClickAir.
block_position: BlockCoordinates
# BlockFace is the face of the block that was interacted with. When clicking the block, it is the face
# clicked. When breaking the block, it is the face that was last being hit until the block broke.
face: varint
# HotBarSlot is the hot bar slot that the player was holding while clicking the block. It should be used
# to ensure that the hot bar slot and held item are correctly synchronised with the server.
hotbar_slot: varint
# HeldItem is the item that was held to interact with the block. The server should check if this item
# is actually present in the HotBarSlot.
held_item: Item
# Position is the position of the player at the time of interaction. For clicking a block, this is the
# position at that time, whereas for breaking the block it is the position at the time of breaking.
player_pos: vec3f
# ClickedPosition is the position that was clicked relative to the block's base coordinate. It can be
# used to find out exactly where a player clicked the block.
click_pos: vec3f
# BlockRuntimeID is the runtime ID of the block that was clicked. It may be used by the server to verify
# that the player's world client-side is synchronised with the server's.
block_runtime_id: varint
# UseItemOnEntityTransactionData represents an inventory transaction data object sent when the client uses
# an item on an entity.
if item_use_on_entity:
# TargetEntityRuntimeID is the entity runtime ID of the target that was clicked. It is the runtime ID
# that was assigned to it in the AddEntity packet.
entity_runtime_id: varint64
# ActionType is the type of the UseItemOnEntity inventory transaction. It is one of the action types
# found in the constants above, and specifies the way the player interacted with the entity.
action_type: varint =>
0: interact
1: attack
# HotBarSlot is the hot bar slot that the player was holding while clicking the entity. It should be used
# to ensure that the hot bar slot and held item are correctly synchronised with the server.
hotbar_slot: zigzag32
# HeldItem is the item that was held to interact with the entity. The server should check if this item
# is actually present in the HotBarSlot.
held_item: Item
# Position is the position of the player at the time of clicking the entity.
player_pos: vec3f
# ClickedPosition is the position that was clicked relative to the entity's base coordinate. It can be
# used to find out exactly where a player clicked the entity.
click_pos: vec3f
# ReleaseItemTransactionData represents an inventory transaction data object sent when the client releases
# the item it was using, for example when stopping while eating or stopping the charging of a bow.
if item_release:
# ActionType is the type of the ReleaseItem inventory transaction. It is one of the action types found
# in the constants above, and specifies the way the item was released.
# As of 1.13, the ActionType is always 0. This field can be ignored, because releasing food (by consuming
# it) or releasing a bow (to shoot an arrow) is essentially the same.
action_type: varint =>
0: release
1: consume
# HotBarSlot is the hot bar slot that the player was holding while releasing the item. It should be used
# to ensure that the hot bar slot and held item are correctly synchronised with the server.
hotbar_slot: zigzag32
# HeldItem is the item that was released. The server should check if this item is actually present in the
# HotBarSlot.
held_item: Item
# HeadPosition is the position of the player's head at the time of releasing the item. This is used
# mainly for purposes such as spawning eating particles at that position.
head_pos: vec3f
ItemStacks: []varint
runtime_id: zigzag32
item: Item
RecipeIngredient:
network_id: zigzag32
_: network_id?
if 0: void
default:
network_data: zigzag32
count: zigzag32
PotionTypeRecipes: []varint
input_item_id: zigzag32
input_item_meta: zigzag32
ingredient_id: zigzag32
ingredient_meta: zigzag32
output_item_id: zigzag32
output_item_meta: zigzag32
PotionContainerChangeRecipes: []varint
input_item_id: zigzag32
ingredient_id: zigzag32
output_item_id: zigzag32
Recipes: []varint
type: zigzag32 =>
'0': 'shapeless' #'ENTRY_SHAPELESS',
'1': 'shaped' #'ENTRY_SHAPED',
'2': 'furnace' # 'ENTRY_FURNACE',
# `furnace_with_metadata` is a recipe specifically used for furnace-type crafting stations. It is equal to
# `furnace`, except it has an input item with a specific metadata value, instead of any metadata value.
'3': 'furnace_with_metadata' # 'ENTRY_FURNACE_DATA', // has metadata
'4': 'multi' #'ENTRY_MULTI', //TODO
'5': 'shulker_box' #'ENTRY_SHULKER_BOX', //TODO
'6': 'shapeless_chemistry' #'ENTRY_SHAPELESS_CHEMISTRY', //TODO
'7': 'shaped_chemistry' #'ENTRY_SHAPED_CHEMISTRY', //TODO
recipe: type?
if shapeless or shulker_box or shapeless_chemistry:
recipe_id: string
input: RecipeIngredient[]varint
output: Item[]varint
uuid: uuid
block: string
priority: zigzag32
network_id: zigzag32
if shaped or shaped_chemistry:
recipe_id: string
width: zigzag32
height: zigzag32
# todo: can this become
# RecipeIngredient[$height][$width] or RecipeIngredient[]$height[]$width ?
input: []$width
_: RecipeIngredient[]$height
output: Item[]varint
uuid: uuid
block: string
priority: zigzag32
network_id: zigzag32
if furnace:
input_id: zigzag32
output: Item
block: string
if furnace_with_metadata:
input_id: zigzag32
input_meta: zigzag32
output: Item
block: string
if multi:
uuid: uuid
network_id: zigzag32
SkinImage:
width: li32
height: li32
data: string
Skin:
skin_id: string
skin_resource_pack: string
skin_data: SkinImage
animations: []li32
skin_image: SkinImage
animation_type: li32
animation_frames: lf32
expression_type: lf32
cape_data: SkinImage
geometry_data: string
animation_data: string
premium: string
persona: bool
cape_on_classic: bool
cape_id: string
full_skin_id: string
arm_size: string
skin_color: string
personal_pieces: []li32
piece_id: string
piece_type: string
pack_id: string
is_default_piece: bool
product_id: string
piece_tint_colors: []li32
piece_type: string
colors: string[]li32
PlayerRecords:
type: u8 =>
0: add
1: remove
records_count: varint
records: []$records_count
_: type?
if add:
uuid: uuid
entity_unique_id: zigzag64
username: string
xbox_user_id: string
platform_chat_id: string
build_platform: li32
skin_data: Skin
is_teacher: bool
is_host: bool
if remove:
uuid: uuid
verified: bool[]$records_count
ScoreEntries:
type: u8 =>
0: change
1: remove
entries: []varint
scoreboard_id: zigzag64
objective_name: string
score: li32
_: type?
if remove:
entry_type: i8 =>
1: player
2: entity
3: fake_player
entity_unique_id: entry_type?
if player or entity: zigzag64
custom_name: entry_type?
if fake_player: string
ScoreboardIdentityEntries:
type: i8 =>
0: TYPE_REGISTER_IDENTITY
1: TYPE_CLEAR_IDENTITY
entries: []varint
scoreboard_id: zigzag64
entity_unique_id: type ?
if TYPE_REGISTER_IDENTITY: zigzag64
default: void
Enchant:
id: u8
level: u8
EnchantOptions: []varint
cost: varint
slot_flags: li32
equip_enchants: Enchant[]varint
held_enchants: Enchant[]varint
self_enchants: Enchant[]varint
name: string
option_id: zigzag32
StackRequestSlotInfo:
container_id: u8
slot_id: u8
stack_id: zigzag32
#
ItemStackRequests: []varint
# RequestID is a unique ID for the request. This ID is used by the server to send a response for this
# specific request in the ItemStackResponse packet.
request_id: zigzag32
actions: []varint
type_id: u8 =>
# TakeStackRequestAction is sent by the client to the server to take x amount of items from one slot in a
# container to the cursor.
0: take
# PlaceStackRequestAction is sent by the client to the server to place x amount of items from one slot into
# another slot, such as when shift clicking an item in the inventory to move it around or when moving an item
# in the cursor into a slot.
1: place
# SwapStackRequestAction is sent by the client to swap the item in its cursor with an item present in another
# container. The two item stacks swap places.
2: swap
# DropStackRequestAction is sent by the client when it drops an item out of the inventory when it has its
# inventory opened. This action is not sent when a player drops an item out of the hotbar using the Q button
# (or the equivalent on mobile). The InventoryTransaction packet is still used for that action, regardless of
# whether the item stack network IDs are used or not.
3: drop
# DestroyStackRequestAction is sent by the client when it destroys an item in creative mode by moving it
# back into the creative inventory.
4: destroy
# ConsumeStackRequestAction is sent by the client when it uses an item to craft another item. The original
# item is 'consumed'.
5: consume
# CreateStackRequestAction is sent by the client when an item is created through being used as part of a
# recipe. For example, when milk is used to craft a cake, the buckets are leftover. The buckets are moved to
# the slot sent by the client here.
# Note that before this is sent, an action for consuming all items in the crafting table/grid is sent. Items
# that are not fully consumed when used for a recipe should not be destroyed there, but instead, should be
# turned into their respective resulting items.
6: create
# LabTableCombineStackRequestAction is sent by the client when it uses a lab table to combine item stacks.
7: lab_table_combine
# BeaconPaymentStackRequestAction is sent by the client when it submits an item to enable effects from a
# beacon. These items will have been moved into the beacon item slot in advance.
8: beacon_payment
# CraftRecipeStackRequestAction is sent by the client the moment it begins crafting an item. This is the
# first action sent, before the Consume and Create item stack request actions.
# This action is also sent when an item is enchanted. Enchanting should be treated mostly the same way as
# crafting, where the old item is consumed.
9: craft_recipe
# AutoCraftRecipeStackRequestAction is sent by the client similarly to the CraftRecipeStackRequestAction. The
# only difference is that the recipe is automatically created and crafted by shift clicking the recipe book.
10: craft_recipe_auto #recipe book?
# CraftCreativeStackRequestAction is sent by the client when it takes an item out of the creative inventory.
# The item is thus not really crafted, but instantly created.
11: craft_creative
# CraftRecipeOptionalStackRequestAction is sent when using an anvil. When this action is sent, the
# CustomNames field in the respective stack request is non-empty and contains the name of the item created
# using the anvil.
12: optional
# CraftNonImplementedStackRequestAction is an action sent for inventory actions that aren't yet implemented
# in the new system. These include, for example, anvils.
13: non_implemented #anvils aren't fully implemented yet
# CraftResultsDeprecatedStackRequestAction is an additional, deprecated packet sent by the client after
# crafting. It holds the final results and the amount of times the recipe was crafted. It shouldn't be used.
# This action is also sent when an item is enchanted. Enchanting should be treated mostly the same way as
# crafting, where the old item is consumed.
14: results_deprecated
_: type_id ?
if take or place:
count: u8
source: StackRequestSlotInfo
destination: StackRequestSlotInfo
if swap:
# Source and Destination point to the source slot from which Count of the item stack were taken and the
# destination slot to which this item was moved.
source: StackRequestSlotInfo
destination: StackRequestSlotInfo
if drop:
# Count is the count of the item in the source slot that was taken towards the destination slot.
count: u8
# Source is the source slot from which items were dropped to the ground.
source: StackRequestSlotInfo
# Randomly seems to be set to false in most cases. I'm not entirely sure what this does, but this is what
# vanilla calls this field.
randomly: bool
if destroy or consume:
# Count is the count of the item in the source slot that was destroyed.
count: u8
# Source is the source slot from which items came that were destroyed by moving them into the creative
# inventory.
source: StackRequestSlotInfo
if create:
# ResultsSlot is the slot in the inventory in which the results of the crafting ingredients are to be
# placed.
result_slot_id: u8
if beacon_payment:
# PrimaryEffect and SecondaryEffect are the effects that were selected from the beacon.
primary_effect: zigzag32
secondary_effect: zigzag32
if craft_recipe or craft_recipe_auto:
# RecipeNetworkID is the network ID of the recipe that is about to be crafted. This network ID matches
# one of the recipes sent in the CraftingData packet, where each of the recipes have a RecipeNetworkID as
# of 1.16.
recipe_network_id: varint
if craft_creative:
# CreativeItemNetworkID is the network ID of the creative item that is being created. This is one of the
# creative item network IDs sent in the CreativeContent packet.
creative_item_network_id: varint32
if optional:
# For the cartography table, if a certain MULTI recipe is being called, this points to the network ID that was assigned.
recipe_network_id: varint
# Most likely the index in the request's filter strings that this action is using
filtered_string_index: li32
if non_implemented: void
if results_deprecated:
result_items: Item[]varint
times_crafted: u8
# CustomNames is a list of custom names involved in the request. This is typically filled with one string
# when an anvil is used.
# * Used for the server to determine which strings should be filtered. Used in anvils to verify a renamed item.
custom_names: string[]varint
# ItemStackResponses is the server's reply to client ItemStackRequests: one entry per
# request, carrying a result status and the authoritative post-request slot contents
# for every container the request touched.
ItemStackResponses: []varint
result: u8
request_id: varint32
containers: []varint
container_id: u8
slots: []varint
slot: u8
hotbar_slot: u8
count: u8
item_stack_id: varint32
custom_name: string
# ItemComponentList is a list of custom item definitions: each entry maps an item name
# to the NBT describing its components/properties.
ItemComponentList: []varint
# Name is the name of the item, which is a name like 'minecraft:stick'.
name: string
# Data is a map containing the components and properties of the item.
nbt: nbt
# CommandOrigin describes where a command came from (player, block, websocket
# automation, ...) and carries the IDs needed to route the response back.
CommandOrigin:
# Origin is one of the values above that specifies the origin of the command. The origin may change,
# depending on what part of the client actually called the command. The command may be issued by a
# websocket server, for example.
type: varint =>
0: player
1: block
2: minecart_block
3: dev_console
4: test
5: automation_player
6: client_automation
7: dedicated_server
8: entity
9: virtual
10: game_argument
11: entity_server
# UUID is the UUID of the command called. This UUID is a bit odd as it is not specified by the server. It
# is not clear what exactly this UUID is meant to identify, but it is unique for each command called.
uuid: uuid
# RequestID is an ID that identifies the request of the client. The server should send a CommandOrigin
# with the same request ID to ensure it can be matched with the request by the caller of the command.
# This is especially important for websocket servers and it seems that this field is only non-empty for
# these websocket servers.
request_id: string
# PlayerUniqueID is an ID that identifies the player, the same as the one found in the AdventureSettings
# packet. Filling it out with 0 seems to work.
# PlayerUniqueID is only written if Origin is CommandOriginDevConsole or CommandOriginTest.
# Note: the field below is conditional — it is only serialized for the two origins listed.
player_entity_id: type?
if dev_console or test:
player_entity_id: zigzag64
# Some arbitrary definitions from CBMC, Window IDs are normally
# unique + sequential
# Negative IDs are pseudo-windows used for special UI slots (anvil, enchanting,
# trading, crafting grid); non-negative IDs identify real open containers.
WindowID: i8 =>
-100: drop_contents
-24: beacon
-23: trading_output
-22: trading_use_inputs
-21: trading_input_2
-20: trading_input_1
-17: enchant_output
-16: enchant_material
-15: enchant_input
-13: anvil_output
-12: anvil_result
-11: anvil_material
-10: container_input
-5: crafting_use_ingredient
-4: crafting_result
-3: crafting_remove_ingredient
-2: crafting_add_ingredient
-1: none
0: inventory
1: first
100: last
119: offhand
120: armor
121: creative
122: hotbar
123: fixed_inventory
124: ui
# WindowType identifies the kind of container UI a window represents (sent when
# opening a window so the client shows the matching screen).
WindowType: u8 =>
0: container
1: workbench
2: furnace
3: enchantment
4: brewing_stand
5: anvil
6: dispenser
7: dropper
8: hopper
9: cauldron
10: minecart_chest
11: minecart_hopper
12: horse
13: beacon
14: structure_editor
15: trading
16: command_block
17: jukebox
18: armor
19: hand
20: compound_creator
21: element_constructor
22: material_reducer
23: lab_table
24: loom
25: lectern
26: grindstone
27: blast_furnace
28: smoker
29: stonecutter
30: cartography
31: hud
32: jigsaw_editor
33: smithing_table
# TODO: remove?
# LegacyEntityType maps the old numeric entity type IDs to entity names. Newer
# protocol versions identify entities by string ID (see AvailableEntityIdentifiers),
# so this table exists only for legacy packets/fields that still use numeric IDs.
LegacyEntityType: li32 =>
10: chicken
11: cow
12: pig
13: sheep
14: wolf
15: villager
16: mooshroom
17: squid
18: rabbit
19: bat
20: iron_golem
21: snow_golem
22: ocelot
23: horse
24: donkey
25: mule
26: skeleton_horse
27: zombie_horse
28: polar_bear
29: llama
30: parrot
31: dolphin
32: zombie
33: creeper
34: skeleton
35: spider
36: zombie_pigman
37: slime
38: enderman
39: silverfish
40: cave_spider
41: ghast
42: magma_cube
43: blaze
44: zombie_villager
45: witch
46: stray
47: husk
48: wither_skeleton
49: guardian
50: elder_guardian
51: npc
52: wither
53: ender_dragon
54: shulker
55: endermite
56: agent # LEARN_TO_CODE_MASCOT
57: vindicator
58: phantom
61: armor_stand
62: tripod_camera
63: player
64: item
65: tnt
66: falling_block
67: moving_block
68: xp_bottle
69: xp_orb
70: eye_of_ender_signal
71: ender_crystal
72: fireworks_rocket
73: thrown_trident
74: turtle
75: cat
76: shulker_bullet
77: fishing_hook
78: chalkboard
79: dragon_fireball
80: arrow
81: snowball
82: egg
83: painting
84: minecart
85: fireball
86: splash_potion
87: ender_pearl
88: leash_knot
89: wither_skull
90: boat
91: wither_skull_dangerous
93: lightning_bolt
94: small_fireball
95: area_effect_cloud
96: hopper_minecart
97: tnt_minecart
98: chest_minecart
100: command_block_minecart
101: lingering_potion
102: llama_spit
103: evocation_fang
104: evocation_illager
105: vex
106: ice_bomb
107: balloon
108: pufferfish
109: salmon
110: drowned
111: tropicalfish
112: cod
113: panda
113: panda

7157
data/newproto.json Normal file

File diff suppressed because it is too large Load diff

2517
data/protocol.json Normal file

File diff suppressed because it is too large Load diff

View file

@ -1,3 +1,4 @@
'use strict';
var pmp = require('../');
if(process.argv.length !=5) {
@ -15,7 +16,7 @@ client.on('mcpe', packet => console.log(packet));
client.on('set_spawn_position', () => {
client.writeMCPE('request_chunk_radius', {
chunk_radius:8
chunkRadius:8
});
});

View file

@ -1,3 +1,4 @@
'use strict';
var mcpe = require('../');
var Parser = require('protodef').Parser;

View file

@ -11,7 +11,7 @@ if(process.argv.length !=4) {
var server = pmp.createServer({
host: process.argv[2],
port: parseInt(process.argv[3]),
name: 'MCPE;Minecraft: PE Server;70 70;0.14.3;0;20'
name: 'MCPE;Minecraft: PE Server;81 81;0.15.0;0;20'
});
server.on('connection', function(client) {
@ -19,20 +19,21 @@ server.on('connection', function(client) {
client.on("mcpe",packet => console.log(packet));
client.on("game_login",packet => {
client.on("login_mcpe",packet => {
client.writeMCPE("player_status",{
status:0
});
client.writeMCPE('move_player', {
entity_id: [0,0],
entityId: [0,0],
x: 1,
y: 64 + 1.62,
z: 1,
yaw: 0,
head_yaw: 0,
headYaw: 0,
pitch: 0,
mode: 0,
on_ground: 1
onGround: 1
});
client.writeMCPE("start_game",{
@ -40,17 +41,17 @@ server.on('connection', function(client) {
dimension:0,
generator:1,
gamemode:1,
entity_id:[0,0],
spawn_x:1,
spawn_y:1,
spawn_z:1,
entityId:[0,0],
spawnX:1,
spawnY:1,
spawnZ:1,
x:0,
y:1+1.62,
z:0,
unknown1:0,
unknown2:0,
unknown3:0,
unknown4:""
isLoadedInCreative:0,
dayCycleStopTime:0,
eduMode:0,
worldName:""
});
client.writeMCPE('set_spawn_position', {
@ -70,19 +71,19 @@ server.on('connection', function(client) {
});
});
client.on("request_chunk_radius",() => {
client.on("chunk_radius_update",() => {
client.writeMCPE('chunk_radius_update',{
chunk_radius:1
});
for (let x = -1; x <=1; x++) {
for (let z = -1; z <=1; z++) {
client.writeBatch([{"name":"mcpe","params":{name:"full_chunk_data",params:{
chunk_x: x,
chunk_z: z,
order: 1,
chunk_data:fs.readFileSync(__dirname+"/chunk")
}}}]);
client.writeBatch([{name:"full_chunk_data",params:{
chunkX: x,
chunkZ: z,
order: 1,
chunkData:fs.readFileSync(__dirname+"/chunk")
}}]);
}
}

31
examples/server_simple.js Normal file
View file

@ -0,0 +1,31 @@
'use strict';
var pmp = require('../');
var fs = require("fs");
if(process.argv.length !=4) {
console.log("Usage: node server.js <host> <port>");
process.exit(1);
}
var server = pmp.createServer({
host: process.argv[2],
port: parseInt(process.argv[3]),
name: 'MCPE;Minecraft: PE Server;81 81;0.15.0;0;20'
});
server.on('connection', function(client) {
client.on("mcpe", packet => console.log(packet));
client.on("login_mcpe", data => {
console.log(client.displayName + '(' + client.XUID + ') ' + ' joined the game');
});
client.on('error', err => {
console.error(err);
});
client.on('end', () => {
console.log("client left");
})
});

View file

@ -1 +1,3 @@
module.exports = require('./src/index.js');
'use strict';
module.exports = require('./dist/index.js');

View file

@ -1,9 +1,10 @@
{
"name": "pocket-minecraft-protocol",
"version": "2.2.3",
"description": "Parse and serialize Minecraft PE packets",
"version": "3.0.0",
"description": "Parse and serialize Minecraft Bedrock Edition packets",
"main": "index.js",
"scripts": {
"build": "cd data/new && node compile.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [
@ -11,19 +12,27 @@
"pocket-edition",
"protocol"
],
"author": "mhsjlw <mhsjlw@aol.com>",
"contributors": [
"rom1504"
],
"license": "MIT",
"dependencies": {
"minecraft-data": "^2.4.0",
"prismarine-nbt": "^1.0.0",
"protodef": "^1.2.0",
"raknet": "^1.7.3",
"uuid-1345": "^1.0.1"
"@azure/msal-node": "^1.0.0-beta.6",
"@jsprismarine/jsbinaryutils": "^2.1.8",
"@xboxreplay/xboxlive-auth": "^3.3.3",
"aes-js": "^3.1.2",
"asn1": "^0.2.4",
"bedrock-provider": "github:extremeheat/bedrock-provider",
"debug": "^4.3.1",
"ec-pem": "^0.18.0",
"jsonwebtoken": "^8.5.1",
"jsp-raknet": "github:extremeheat/raknet#client",
"minecraft-folder-path": "^1.1.0",
"prismarine-nbt": "^1.5.0",
"protodef": "github:extremeheat/node-protodef#compiler",
"raknet-native": "^0.1.0",
"uuid-1345": "^0.99.7"
},
"devDependencies": {
"mocha": "^2.5.3"
},
"devDependencies": {},
"repository": {
"type": "git",
"url": "git+https://github.com/mhsjlw/pocket-minecraft-protocol.git"

47
samples/clientTest.js Normal file
View file

@ -0,0 +1,47 @@
process.env.DEBUG = 'minecraft-protocol raknet'
const { Client } = require('../src/client')
const fs = require('fs')
// console.log = () =>
// Smoke-test client: connects to a local Bedrock server, walks through the
// resource-pack negotiation, then queues the packets needed to start spawning.
// NOTE(review): relies on a server listening on 127.0.0.1:19132 — this is an
// interactive sample, not an automated test.
async function test() {
const client = new Client({
hostname: '127.0.0.1',
port: 19132
})
// The server first advertises its resource packs; we accept immediately with
// an empty pack list ('completed').
client.once('resource_packs_info', (packet) => {
client.write('resource_pack_client_response', {
response_status: 'completed',
resourcepackids: []
})
// After our response the server sends the pack application order; complete
// that stage too.
client.once('resource_pack_stack', (stack) => {
client.write('resource_pack_client_response', {
response_status: 'completed',
resourcepackids: []
})
})
// client.once('resource_packs_info', (packet) => {
//   client.write('resource_pack_client_response', {
//     response_status: 'completed',
//     resourcepackids: []
//   })
// })
// Queue (batch) the post-login packets: disable the blob cache, request a
// minimal chunk radius, and start clock synchronization.
client.queue('client_cache_status', { enabled: false })
client.queue('request_chunk_radius', { chunk_radius: 1 })
client.queue('tick_sync', { request_time: BigInt(Date.now()), response_time: 0n })
})
// var read = 0;
// client.on('level_chunk', (packet) => {
//   read++
//   fs.writeFileSync(`level_chunk-${read}.json`, JSON.stringify(packet, null, 2))
// })
}
test()

149
samples/serverTest.js Normal file
View file

@ -0,0 +1,149 @@
process.env.DEBUG = 'minecraft-protocol raknet'
const { Server } = require('../src/server')
const CreativeItems = require('../data/creativeitems.json')
const NBT = require('prismarine-nbt')
const fs = require('fs')
let server = new Server({
})
server.create('0.0.0.0', 19132)
let ran = false
// Per-connection handler: replays the full vanilla join sequence from captured
// packet JSON files so a real client can connect and spawn. The packet ORDER
// below matters — the client expects resource-pack negotiation before the
// start_game/inventory/chunk packets.
server.on('connect', ({ client }) => {
/** @type {Player} */
client.on('join', () => {
console.log('Client joined', client.getData())
// ResourcePacksInfo is sent by the server to inform the client on what resource packs the server has. It
// sends a list of the resource packs it has and basic information on them like the version and description.
client.write('resource_packs_info', {
'must_accept': false,
'has_scripts': false,
'behaviour_packs': [],
'texture_packs': []
})
client.once('resource_pack_client_response', async (packet) => {
// ResourcePackStack is sent by the server to send the order in which resource packs and behaviour packs
// should be applied (and downloaded) by the client.
client.write('resource_pack_stack', {
'must_accept': false,
'behavior_packs': [],
'resource_packs': [],
'game_version': '',
'experiments': [],
'experiments_previously_used': false
})
// NOTE(review): this second response handler is intentionally empty — the
// client answers the pack stack too, but nothing further is needed here.
client.once('resource_pack_client_response', async (packet) => {
})
client.write('network_settings', {
compression_threshold: 1
})
// Clear the three armor-ish UI slots (inventory_id 120) before sending content.
for (let i = 0; i < 3; i++) {
client.queue('inventory_slot', {"inventory_id":120,"slot":i,"uniqueid":0,"item":{"network_id":0}})
}
// Replay captured world/player state packets in join order.
client.queue('inventory_transaction', require('../src/packets/inventory_transaction.json'))
client.queue('player_list', require('../src/packets/player_list.json'))
client.queue('start_game', require('../src/packets/start_game.json'))
client.queue('item_component', {"entries":[]})
client.queue('set_spawn_position', require('../src/packets/set_spawn_position.json'))
client.queue('set_time', { time: 5433771 })
client.queue('set_difficulty', { difficulty: 1 })
client.queue('set_commands_enabled', { enabled: true })
client.queue('adventure_settings', require('../src/packets/adventure_settings.json'))
client.queue('biome_definition_list', require('../src/packets/biome_definition_list.json'))
client.queue('available_entity_identifiers', require('../src/packets/available_entity_identifiers.json'))
client.queue('update_attributes', require('../src/packets/update_attributes.json'))
client.queue('creative_content', require('../src/packets/creative_content.json'))
client.queue('inventory_content', require('../src/packets/inventory_content.json'))
client.queue('player_hotbar', {"selected_slot":3,"window_id":0,"select_slot":true})
client.queue('crafting_data', require('../src/packets/crafting_data.json'))
client.queue('available_commands', require('../src/packets/available_commands.json'))
client.queue('chunk_radius_update', {"chunk_radius":5})
client.queue('set_entity_data', require('../src/packets/set_entity_data.json'))
client.queue('game_rules_changed', require('../src/packets/game_rules_changed.json'))
client.queue('respawn', {"x":646.9405517578125,"y":65.62001037597656,"z":77.86255645751953,"state":0,"runtime_entity_id":0})
// Send pre-serialized chunk payloads (hex dumps on disk) straight to the wire.
for (const file of fs.readdirSync('../src/chunks')) {
const buffer = Buffer.from(fs.readFileSync('../src/chunks/' + file, 'utf8'), 'hex')
// console.log('Sending chunk', chunk)
client.sendBuffer(buffer)
}
// for (const chunk of chunks) {
//   client.queue('level_chunk', chunk)
// }
// Keep the client's chunk publisher alive, then flag the player as spawned
// once everything above has had time to arrive.
setInterval(() => {
client.write('network_chunk_publisher_update', {"coordinates":{"x":646,"y":130,"z":77},"radius":64})
}, 9500)
setTimeout(() => {
client.write('play_status', { status: 'player_spawn' })
}, 8000)
// Respond to tick synchronization packets
client.on('tick_sync', ({ request_time }) => {
client.queue('tick_sync', {
request_time,
response_time: BigInt(Date.now())
})
})
})
})
})
// Promise-based delay: resolves (with undefined) after `ms` milliseconds.
async function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms))
}
// CHUNKS
// const { ChunkColumn, Version } = require('bedrock-provider')
const mcData = require('minecraft-data')('1.16')
var chunks = []
// Build a 10x8 grid of chunk columns (cx 35..44, cz 0..7) filled solid with
// stone up to y=60, network-encode each column, and append the resulting
// level_chunk parameters to the module-level `chunks` array.
async function buildChunks() {
  const stone = mcData.blocksByName.stone
  for (var cx = 35; cx < 45; cx++) {
    for (var cz = 0; cz < 8; cz++) {
      // BUG FIX: the column must be created at chunk coordinates (cx, cz). The
      // original passed the block-loop variables `x`/`z`, which are var-hoisted
      // and still undefined at this point.
      const column = new ChunkColumn(Version.v1_2_0_bis, cx, cz)
      for (var x = 0; x < 16; x++) {
        for (var y = 0; y < 60; y++) {
          for (var z = 0; z < 16; z++) {
            column.setBlock(x, y, z, stone)
          }
        }
      }
      const ser = await column.networkEncodeNoCache()
      chunks.push({
        x: cx, z: cz, sub_chunk_count: column.sectionsLen, cache_enabled: false,
        blobs: [], payload: ser
      })
    }
  }
  // console.log('Chunks',chunks)
}
// buildChunks()

108
src/auth/chains.js Normal file
View file

@ -0,0 +1,108 @@
const JWT = require('jsonwebtoken')
const constants = require('./constants')
// Refer to the docs:
// https://web.archive.org/web/20180917171505if_/https://confluence.yawk.at/display/PEPROTOCOL/Game+Packets#GamePackets-Login
// Wrap a raw public key in PEM armor. If the input already looks like a PEM
// string (starts with '-'), it is returned unchanged.
// Fix: removed a stray debug console.log that dumped key material to stdout.
// @param {Buffer|string} mcPubKeyBuffer - raw key bytes, or an existing PEM string
// @returns {string} PEM-formatted public key
function mcPubKeyToPem(mcPubKeyBuffer) {
  // A Buffer's [0] is a numeric byte (never '-'), so this branch only matches
  // strings that are already PEM-armored.
  if (mcPubKeyBuffer[0] == '-') return mcPubKeyBuffer
  let pem = '-----BEGIN PUBLIC KEY-----\n'
  let base64PubKey = mcPubKeyBuffer.toString('base64')
  const maxLineLength = 65
  // PEM requires the base64 body to be wrapped into fixed-width lines.
  while (base64PubKey.length > 0) {
    pem += base64PubKey.substring(0, maxLineLength) + '\n'
    base64PubKey = base64PubKey.substring(maxLineLength)
  }
  pem += '-----END PUBLIC KEY-----\n'
  return pem
}
// Extract the x5u (public-key URL/value) field from a JWT's header segment.
// @param {string} token - a compact JWT ("header.payload.signature")
// @returns {string} the header's x5u claim
function getX5U(token) {
  const headerSegment = token.split('.', 1)[0]
  const headerJson = Buffer.from(headerSegment, 'base64').toString('utf-8')
  return JSON.parse(headerJson).x5u
}
// Walk the login JWT chain and verify each link's signature with the public key
// vouched for by the previous link (the first link is self-signed by the
// client, so its own x5u bootstraps the chain).
// Fixes: removed debug console.logs that dumped keys and decoded tokens;
// `algorithms` is now an array as the jsonwebtoken API specifies (the bare
// string only worked by accident of String.prototype.indexOf).
// @param {string[]} chain - JWT strings from the Login packet, in order
// @returns {{ key: string, data: object }} the final identityPublicKey (raw
//          base64, not PEM) and the merged claims of every link
function verifyAuth(chain) {
  let data = {}
  // There are three JWT tokens sent to us, one signed by the client,
  // one signed by Mojang with the Mojang token we have and another one
  // from Xbox with additional user profile data.
  // We verify that at least one of the tokens in the chain has been properly
  // signed by Mojang by checking the x509 public key in the JWT headers.
  let didVerify = false
  let pubKey = mcPubKeyToPem(getX5U(chain[0])) // the first one is client signed, allow it
  let finalKey = null
  for (const token of chain) {
    const decoded = JWT.verify(token, pubKey, { algorithms: ['ES384'] })
    // Check if this link was signed by the Mojang root key.
    const x5u = getX5U(token)
    if (x5u == constants.PUBLIC_KEY && !data.extraData?.XUID) {
      didVerify = true
    }
    // The next link must verify against the key this link vouches for.
    pubKey = decoded.identityPublicKey ? mcPubKeyToPem(decoded.identityPublicKey) : x5u
    finalKey = decoded.identityPublicKey || finalKey // non pem
    data = { ...data, ...decoded }
  }
  // TODO(review): didVerify is computed but never surfaced — callers cannot
  // currently tell whether the chain was actually Mojang-signed.
  return { key: finalKey, data }
}
// Verify the skin-data JWT against the identity public key recovered from the
// auth chain. Throws (via JWT.verify) if the signature is invalid.
// Fix: `algorithms` is now an array as the jsonwebtoken API specifies.
// @param {string|Buffer} publicKey - raw base64 key (or an existing PEM string)
// @param {string} token - the clientData JWT carrying skin/user properties
// @returns {object} the decoded skin payload
function verifySkin(publicKey, token) {
  const pubKey = mcPubKeyToPem(publicKey)
  return JWT.verify(token, pubKey, { algorithms: ['ES384'] })
}
// Decode and verify a full Login packet: first the identity chain, then the
// skin JWT signed with the key the chain vouches for.
// @param {string[]} authTokens - the identity JWT chain
// @param {string} skinTokens - the clientData (skin) JWT
// @returns {{ key: string, userData: object, skinData: object }}
function decodeLoginJWT(authTokens, skinTokens) {
  const auth = verifyAuth(authTokens)
  return {
    key: auth.key,
    userData: auth.data,
    skinData: verifySkin(auth.key, skinTokens)
  }
}
// Assemble the outgoing login chain: the local (self-signed) token first,
// followed by every Mojang-signed token in order.
// @param {string} localChain - the client's self-signed JWT
// @param {string[]} mojangChain - the Mojang-provided JWT chain
// @returns {string[]} the combined chain
function encodeLoginJWT(localChain, mojangChain) {
  return [localChain, ...mojangChain]
}
module.exports = { encodeLoginJWT, decodeLoginJWT }
// Ad-hoc smoke test: verifies a captured Login packet (login.json) end-to-end
// through decodeLoginJWT. Kept disabled (see the commented call below).
function testServer() {
const loginPacket = require('./login.json')
// console.log(loginPacket)
const authChains = JSON.parse(loginPacket.data.chain)
const skinChain = loginPacket.data.clientData
try {
// NOTE(review): decodeLoginJWT returns { key, userData, skinData } — the
// `data`/`chain` names destructured here will always be undefined; confirm
// whether this test was meant to read userData/skinData instead.
var { data, chain } = decodeLoginJWT(authChains.chain, skinChain)
} catch (e) {
console.error(e)
throw new Error('Failed to verify user')
}
console.log('Authed')
// console.log(loginPacket)
}

3
src/auth/constants.js Normal file
View file

@ -0,0 +1,3 @@
module.exports = {
PUBLIC_KEY: 'MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8ELkixyLcwlZryUQcu1TvPOmI2B7vX83ndnWRUaXm74wFfa5f/lwQNTfrLVHa2PmenpGI6JhIMUJaWZrjmMj90NoKNFSNBuKdm8rYiXsfaz3K36x/1U26HpG0ZxK/V1V'
}

184
src/auth/encryption.js Normal file
View file

@ -0,0 +1,184 @@
const JWT = require('jsonwebtoken')
const crypto = require('crypto')
const { Ber } = require('asn1')
const ec_pem = require('ec-pem')
const SALT = '🧂'
const curve = 'secp384r1'
// Attaches Minecraft Bedrock login encryption to `client`: generates the P-384
// ECDH key pair, wires the clientbound/serverbound handshake handlers, and adds
// helpers that build the client's identity/user JWT chains. The exact order of
// secret derivation -> handshake write -> startEncryption below is part of the
// protocol; do not reorder.
function Encrypt(client, server, options) {
client.ecdhKeyPair = crypto.createECDH(curve)
client.ecdhKeyPair.generateKeys()
client.clientX509 = writeX509PublicKey(client.ecdhKeyPair.getPublicKey())
// Server side: called with the client's identityPublicKey once its Login packet
// has been verified; derives the shared secret and starts encrypting.
function startClientboundEncryption(publicKey) {
console.warn('[encrypt] Pub key base64: ', publicKey)
const pubKeyBuf = readX509PublicKey(publicKey.key)
const alice = client.ecdhKeyPair
const alicePEM = ec_pem(alice, curve) // https://github.com/nodejs/node/issues/15116#issuecomment-384790125
const alicePEMPrivate = alicePEM.encodePrivateKey()
// Shared secret from bob's public key + our private key
client.sharedSecret = alice.computeSecret(pubKeyBuf)
// Secret hash we use for packet encryption:
// From the public key of the remote and the private key
// of the local, a shared secret is generated using ECDH.
// The secret key bytes are then computed as
// sha256(server_token + shared_secret). These secret key
// bytes are 32 bytes long.
const secretHash = crypto.createHash('sha256')
secretHash.update(SALT)
secretHash.update(client.sharedSecret)
console.log('[encrypt] Shared secret', client.sharedSecret)
client.secretKeyBytes = secretHash.digest()
console.log('[encrypt] Shared hash', client.secretKeyBytes)
const x509 = writeX509PublicKey(alice.getPublicKey())
// The handshake JWT tells the client our public key (x5u header) and the salt
// it must mix into its own sha256 when deriving the same secret.
const token = JWT.sign({
salt: toBase64(SALT),
signedToken: alice.getPublicKey('base64')
}, alicePEMPrivate, { algorithm: 'ES384', header: { x5u: x509 } })
client.write('server_to_client_handshake', {
token: token
})
// The encryption scheme is AES/CFB8/NoPadding with the
// secret key being the result of the sha256 above and
// the IV being the first 16 bytes of this secret key.
const initial = client.secretKeyBytes.slice(0, 16)
client.startEncryption(initial)
}
// Client side: called with the server_to_client_handshake payload; mirrors the
// derivation above using the salt and x5u key from the server's JWT.
function startServerboundEncryption(token) {
console.warn('[encrypt] Starting serverbound encryption', token)
const jwt = token?.token
if (!jwt) {
// TODO: allow connecting to servers without encryption
throw Error('Server did not return a valid JWT, cannot start encryption!')
}
// TODO: Should we do some JWT signature validation here? Seems pointless
const alice = client.ecdhKeyPair
const [header, payload, signature] = jwt.split('.').map(k => Buffer.from(k, 'base64'))
const head = JSON.parse(String(header))
const body = JSON.parse(String(payload))
const serverPublicKey = readX509PublicKey(head.x5u)
client.sharedSecret = alice.computeSecret(serverPublicKey)
console.log('[encrypt] Shared secret', client.sharedSecret)
const salt = Buffer.from(body.salt, 'base64')
const secretHash = crypto.createHash('sha256')
secretHash.update(salt)
secretHash.update(client.sharedSecret)
client.secretKeyBytes = secretHash.digest()
console.log('[encrypt] Shared hash', client.secretKeyBytes)
const initial = client.secretKeyBytes.slice(0, 16)
client.startEncryption(initial)
// It works! First encrypted packet :)
client.write('client_to_server_handshake', {})
// NOTE(review): as an EventEmitter listener, `this` here is the emitter
// (`client`), so this emits 'join' on the client — confirm that is intended.
this.emit('join')
}
client.on('server.client_handshake', startClientboundEncryption)
client.on('client.server_handshake', startServerboundEncryption)
// Builds the self-signed identity JWT that vouches for `mojangKey` (defaults to
// Mojang's root key), then builds the user-data chain with the same key pair.
client.createClientChain = (mojangKey) => {
mojangKey = mojangKey || require('./constants').PUBLIC_KEY
const alice = client.ecdhKeyPair
const alicePEM = ec_pem(alice, curve) // https://github.com/nodejs/node/issues/15116#issuecomment-384790125
const alicePEMPrivate = alicePEM.encodePrivateKey()
const token = JWT.sign({
identityPublicKey: mojangKey,
certificateAuthority: true
}, alicePEMPrivate, { algorithm: 'ES384', header: { x5u: client.clientX509 } })
client.clientIdentityChain = token
client.createClientUserChain(alicePEMPrivate)
}
// Builds the clientData JWT (skin + device metadata), merged with any
// options.userData overrides, signed with our ECDH private key.
client.createClientUserChain = (privateKey) => {
let payload = {
ServerAddress: options.hostname,
ThirdPartyName: client.profile.name,
DeviceOS: client.session?.deviceOS || 1,
GameVersion: options.version || '1.16.201',
ClientRandomId: Date.now(), // TODO make bigger
DeviceId: '2099de18-429a-465a-a49b-fc4710a17bb3', // TODO random
LanguageCode: 'en_GB', // TODO locale
AnimatedImageData: [],
PersonaPieces: [],
PieceTintColours: [],
SelfSignedId: '78eb38a6-950e-3ab9-b2cf-dd849e343701',
SkinId: '5eb65f73-af11-448e-82aa-1b7b165316ad.persona-e199672a8c1a87e0-0',
SkinData: 'AAAAAA==',
SkinResourcePatch: 'ewogICAiZ2VvbWV0cnkiIDogewogICAgICAiYW5pbWF0ZWRfMTI4eDEyOCIgOiAiZ2VvbWV0cnkuYW5pbWF0ZWRfMTI4eDEyOF9wZXJzb25hLWUxOTk2NzJhOGMxYTg3ZTAtMCIsCiAgICAgICJhbmltYXRlZF9mYWNlIiA6ICJnZW9tZXRyeS5hbmltYXRlZF9mYWNlX3BlcnNvbmEtZTE5OTY3MmE4YzFhODdlMC0wIiwKICAgICAgImRlZmF1bHQiIDogImdlb21ldHJ5LnBlcnNvbmFfZTE5OTY3MmE4YzFhODdlMC0wIgogICB9Cn0K',
SkinGeometryData: require('./geom'),
"SkinImageHeight": 1,
"SkinImageWidth": 1,
"ArmSize": "wide",
"CapeData": "",
"CapeId": "",
"CapeImageHeight": 0,
"CapeImageWidth": 0,
"CapeOnClassicSkin": false,
PlatformOfflineId: '',
PlatformOnlineId: '', //chat
// a bunch of meaningless junk
CurrentInputMode: 1,
DefaultInputMode: 1,
DeviceModel: '',
GuiScale: -1,
UIProfile: 0,
TenantId: '',
PremiumSkin: false,
PersonaSkin: false,
PieceTintColors: [],
SkinAnimationData: '',
ThirdPartyNameOnly: false,
"SkinColor": "#ffffcd96",
}
// NOTE(review): this line replaces the entire payload built above with a
// captured packet dump (debug leftover?) — the hand-built object is dead code
// while this stands. Confirm before removing either.
payload = require('./logPack.json')
const customPayload = options.userData || {}
payload = { ...payload, ...customPayload }
client.clientUserChain = JWT.sign(payload, privateKey,
{ algorithm: 'ES384', header: { x5u: client.clientX509 } })
}
}
// Base64-encode a UTF-8 string.
// @param {string} string - text to encode
// @returns {string} base64 representation
function toBase64(string) {
  const bytes = Buffer.from(string, 'utf8')
  return bytes.toString('base64')
}
// Parse a base64 DER X.509 SubjectPublicKeyInfo structure and return the raw
// EC public-key point bytes (suitable for ECDH.computeSecret). The Ber reads
// below mirror the structure written by writeX509PublicKey and must stay in
// this exact order.
function readX509PublicKey(key) {
var reader = new Ber.Reader(Buffer.from(key, "base64"));
reader.readSequence();
reader.readSequence();
reader.readOID(); // Hey, I'm an elliptic curve
reader.readOID(); // This contains the curve type, could be useful
// The BIT STRING's first byte is its unused-bits count; .slice(1) drops it to
// leave only the key point.
return Buffer.from(reader.readString(Ber.BitString, true)).slice(1);
}
// Wrap raw EC public-key point bytes in a DER X.509 SubjectPublicKeyInfo
// structure and return it base64-encoded (the inverse of readX509PublicKey).
function writeX509PublicKey(key) {
var writer = new Ber.Writer();
writer.startSequence();
writer.startSequence();
writer.writeOID("1.2.840.10045.2.1"); // id-ecPublicKey
writer.writeOID("1.3.132.0.34"); // secp384r1 (the curve used by Bedrock)
writer.endSequence();
// Prepend the BIT STRING unused-bits byte (0x00) before the key point.
writer.writeBuffer(Buffer.concat([Buffer.from([0x00]), key]), Ber.BitString);
writer.endSequence();
return writer.buffer.toString("base64");
}
module.exports = {
readX509PublicKey,
writeX509PublicKey,
Encrypt
}

232
src/auth/tests/encrypt.js Normal file
View file

@ -0,0 +1,232 @@
const crypto = require('crypto')
const JWT = require('jsonwebtoken')
const constants = require('./constants')
const { Ber } = require('asn1')
const ec_pem = require('ec-pem');
// function Encrypt(client, options) {
// this.startClientboundEncryption = (pubKeyBuf) => {
// }
// client.on('start_encrypt', this.startClientboundEncryption)
// }
// module.exports = Encrypt
// Server -> Client : sent right after the client sends us a LOGIN_PACKET so
// we can start the encryption process
// @param {key} - The key from the client Login Packet final JWT chain
// Experimental scratch code exploring the server-side handshake: derives a
// secret from the remote public key and tries to sign the handshake JWT.
// NOTE(review): several things here look broken by design of the experiment —
// mcPubKeyToPem emits a PUBLIC KEY PEM header even for the private key, and a
// secp256k1 key is used with ES384 (which expects P-384); JWT.sign will likely
// reject both. Kept as-is since this is a test scratch file.
function startClientboundEncryption(pubKeyBuf) {
// create our ecdh keypair
const type = 'secp256k1'
const alice = crypto.createECDH(type)
const aliceKey = alice.generateKeys()
const alicePublicKey = aliceKey.toString('base64')
const alicePrivateKey = mcPubKeyToPem(alice.getPrivateKey('base64'))
// get our secret key hex encoded
// const aliceSecret = alice.computeSecret(pubKeyBuf, null, 'hex')
// (yawkat:)
// From the public key of the remote and the private key of the local,
// a shared secret is generated using ECDH. The secret key bytes are
// then computed as sha256(server_token + shared_secret). These secret
// key bytes are 32 bytes long.
const salt = Buffer.from('', 'utf-8')
let secret = crypto.createHash('sha256').update(Buffer.concat([salt, pubKeyBuf])).digest()
console.log('alice', alicePrivateKey)
const pem = mcPubKeyToPem(alice.getPrivateKey().toString('base64'))
console.log('pem', pem)
const token = JWT.sign({
salt,
signedToken: alicePublicKey
}, pem, { algorithm: 'ES384' })
console.log('Token', token)
// get our Secret Bytes from the secret key
// alice.setPrivateKey(
//   crypto.createHash('sha256').update('alice', 'utf8').digest()
// )
// using (var sha = SHA256.Create())
// {
//   secret = sha.ComputeHash(secretPrepend.Concat(agreement.CalculateAgreement(remotePublicKey).ToByteArrayUnsigned()).ToArray());
// }
// NOTE(review): `bob` below is created but never used.
const bob = crypto.createECDH('secp256k1');
// URI x5u = URI.create(Base64.getEncoder().encodeToString(serverKeyPair.getPublic().getEncoded()));
// JWTClaimsSet claimsSet = new JWTClaimsSet.Builder().claim("salt", Base64.getEncoder().encodeToString(token)).build();
// SignedJWT jwt = new SignedJWT(new JWSHeader.Builder(JWSAlgorithm.ES384).x509CertURL(x5u).build(), claimsSet);
// signJwt(jwt, (ECPrivateKey) serverKeyPair.getPrivate());
// return jwt;
}
// Sanity-check Node's ECDH: two parties exchange public keys and must derive
// the same shared secret.
// Fixes: the call to bob.generateKeys() had been swallowed into a comment, so
// bob had no key pair and getPublicKey()/computeSecret threw; also removed the
// inner `require('crypto')` that shadowed the module-level import.
// @returns {boolean} true when both derived secrets match
function testECDH() {
  const alice = crypto.createECDH('secp256k1')
  const bob = crypto.createECDH('secp256k1')
  // Note: This is a shortcut way to specify one of Alice's previous private
  // keys. It would be unwise to use such a predictable private key in a real
  // application.
  alice.setPrivateKey(
    crypto.createHash('sha256').update('alice', 'utf8').digest()
  )
  // Bob uses a newly generated cryptographically strong pseudorandom key pair.
  bob.generateKeys()
  const alice_secret = alice.computeSecret(bob.getPublicKey(), null, 'hex')
  const bob_secret = bob.computeSecret(alice.getPublicKey(), null, 'hex')
  // alice_secret and bob_secret should be the same shared secret value
  console.log(alice_secret === bob_secret)
  return alice_secret === bob_secret
}
// Demo of ephemeral ECDH agreement: both parties generate fresh secp256k1 key
// pairs, exchange public keys, and print the (matching) shared secrets.
function testECDH2() {
  const curveName = 'secp256k1'
  const alice = crypto.createECDH(curveName)
  const bob = crypto.createECDH(curveName)
  const alicePub = alice.generateKeys()
  const bobPub = bob.generateKeys()
  console.log("\nAlice private key:\t", alice.getPrivateKey().toString('hex'))
  console.log("Alice public key:\t", alicePub.toString('hex'))
  console.log("\nBob private key:\t", bob.getPrivateKey().toString('hex'))
  console.log("Bob public key:\t", bobPub.toString('hex'))
  // Exchange public keys and derive the shared secret on each side.
  const aliceShared = alice.computeSecret(bobPub)
  const bobShared = bob.computeSecret(alicePub)
  console.log("\nAlice shared key:\t", aliceShared.toString('hex'))
  console.log("Bob shared key:\t\t", bobShared.toString('hex'))
}
/**
 * Wraps a raw base64/DER public key in PEM armor, splitting the base64 body
 * into 65-character lines. If the input already looks PEM-encoded (starts
 * with '-'), it is returned unchanged.
 * @param {Buffer|string} mcPubKeyBuffer base64 key material, or an existing PEM
 * @returns {Buffer|string} PEM text (or the original input when already PEM)
 */
function mcPubKeyToPem(mcPubKeyBuffer) {
  // Bug fix: indexing a Buffer yields a byte (number), so the previous
  // `mcPubKeyBuffer[0] == '-'` check could never match Buffer input
  // ('-' coerces to NaN). Compare the decoded character instead.
  // Also removed a leftover debug console.log of the key material.
  const firstChar = typeof mcPubKeyBuffer === 'string'
    ? mcPubKeyBuffer[0]
    : String.fromCharCode(mcPubKeyBuffer[0])
  if (firstChar === '-') return mcPubKeyBuffer
  let pem = '-----BEGIN PUBLIC KEY-----\n'
  let base64PubKey = mcPubKeyBuffer.toString('base64')
  const maxLineLength = 65
  while (base64PubKey.length > 0) {
    pem += base64PubKey.substring(0, maxLineLength) + '\n'
    base64PubKey = base64PubKey.substring(maxLineLength)
  }
  pem += '-----END PUBLIC KEY-----\n'
  return pem
}
// Extracts the raw EC public-key point from a base64 X.509/SPKI
// (SubjectPublicKeyInfo) structure using a BER reader.
// Layout read here: SEQUENCE { SEQUENCE { OID, OID }, BIT STRING key }.
function readX509PublicKey(key) {
  var reader = new Ber.Reader(Buffer.from(key, "base64"));
  reader.readSequence();
  reader.readSequence();
  reader.readOID(); // Hey, I'm an elliptic curve
  reader.readOID(); // This contains the curve type, could be useful
  // slice(1) drops the BIT STRING's leading "unused bits" octet
  return Buffer.from(reader.readString(Ber.BitString, true)).slice(1);
}
// Scratch test: parse Mojang's X.509 public key from constants and start the
// clientbound encryption handshake with the extracted raw EC point.
function testMC() {
  // const pubKeyBuf = Buffer.from(constants.PUBLIC_KEY, 'base64')
  // const pem = mcPubKeyToPem(pubKeyBuf)
  // console.log(mcPubKeyToPem(pubKeyBuf))
  // const publicKey = crypto.createPublicKey({ key: pem, format: 'der' })
  const pubKeyBuf = readX509PublicKey(constants.PUBLIC_KEY)
  // console.log('Mojang pub key', pubKeyBuf.toString('hex'), publicKey)
  startClientboundEncryption(pubKeyBuf)
}
// Scratch test: ECDH agreement against Mojang's secp384r1 public key, then a
// manual signature and a JWT sign/verify round-trip using our ephemeral key
// (PEM-encoded via ec-pem). Logs everything for manual inspection.
function testMC2() {
  // const mojangPubKeyBuf = Buffer.from('MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8ELkixyLcwlZryUQcu1TvPOmI2B7vX83ndnWRUaXm74wFfa5f/lwQNTfrLVHa2PmenpGI6JhIMUJaWZrjmMj90NoKNFSNBuKdm8rYiXsfaz3K36x/1U26HpG0ZxK/V1V', 'base64')
  // const pem = mcPubKeyToPem(mojangPubKeyBuf)
  // const publicKey = crypto.createPublicKey({ key: pem })
  const publicKey = readX509PublicKey(constants.PUBLIC_KEY)
  const curve = 'secp384r1'
  const alice = crypto.createECDH(curve)
  // const keys = crypto.generateKeyPair('ec',)
  // const bob = crypto.generateKeyPairSync('ec', {
  //   namedCurve: type
  // })
  // alice.setPrivateKey(bob.privateKey.export({ type: 'pkcs8', format: 'pem' }))
  // alice.setPublicKey(bob.publicKey.export({ type: 'spki', format: 'pem' }))
  // console.log(bob)
  const aliceKey = alice.generateKeys()
  // ec-pem wraps the ECDH key pair so it can be exported as PEM for signing
  const alicePEM = ec_pem(alice, curve)
  const alicePEMPrivate = alicePEM.encodePrivateKey()
  const alicePEMPublic = alicePEM.encodePublicKey()
  // const alicePublicKey = aliceKey.toString('base64')
  // const alicePrivateKey = alice.getPrivateKey().toString('base64')
  // Shared secret: our private key + Mojang's public point
  const aliceSecret = alice.computeSecret(publicKey, null, 'hex')
  console.log('Alice private key PEM', alicePEMPrivate)
  console.log('Alice public key PEM', alicePEMPublic)
  console.log('Alice public key', alice.getPublicKey('base64'))
  console.log('Alice secret key', aliceSecret)
  // Manual sign with the PEM private key (sanity check before JWT)
  var sign = crypto.createSign('RSA-SHA256')
  sign.write('something')
  sign.end()
  // // const pem2 =
  // // `-----BEGIN PRIVATE KEY-----
  // // ${alice.getPrivateKey('base64')}
  // // -----END PRIVATE KEY-----`
  // console.log('PEM', bob.privateKey)
  const sig = sign.sign(alicePEMPrivate, 'hex')
  console.log('Signature', sig)
  const token = JWT.sign({
    salt: 'HELLO',
    signedToken: alice.getPublicKey('base64')
  }, alicePEMPrivate, { algorithm: 'ES384' })
  console.log('Token', token)
  // NOTE(review): jsonwebtoken documents `algorithms` as an array
  // (e.g. ['ES384']) — confirm the string form is accepted.
  const verified = JWT.verify(token, alicePEMPublic, { algorithms: 'ES384' })
  console.log('Verified!', verified)
}
// Scratch test: manually parse Mojang's SPKI key with asn1's BER reader and
// derive an ECDH shared secret on secp384r1.
function testMC3() {
  var Ber = require('asn1').Ber;
  // NOTE(review): `new Buffer(...)` is deprecated; Buffer.from() is preferred.
  var reader = new Ber.Reader(new Buffer(constants.PUBLIC_KEY, "base64"));
  reader.readSequence();
  reader.readSequence();
  reader.readOID(); // Hey, I'm an elliptic curve
  reader.readOID(); // This contains the curve type, could be useful
  // slice(1) drops the BIT STRING's "unused bits" octet
  var pubKey = reader.readString(Ber.BitString, true).slice(1);
  var server = crypto.createECDH('secp384r1');
  server.generateKeys();
  console.log(server.computeSecret(pubKey));
}
// testECDH2()
testMC2()

View file

@ -0,0 +1,88 @@
const crypto = require('crypto')
const JWT = require('jsonwebtoken')
const constants = require('./constants')
const { Ber } = require('asn1')
const ec_pem = require('ec-pem')
// Extracts the raw EC public-key point from a base64 X.509/SPKI
// (SubjectPublicKeyInfo) structure using a BER reader.
// Layout read here: SEQUENCE { SEQUENCE { OID, OID }, BIT STRING key }.
function readX509PublicKey(key) {
  var reader = new Ber.Reader(Buffer.from(key, "base64"));
  reader.readSequence();
  reader.readSequence();
  reader.readOID(); // Hey, I'm an elliptic curve
  reader.readOID(); // This contains the curve type, could be useful
  // slice(1) drops the BIT STRING's leading "unused bits" octet
  return Buffer.from(reader.readString(Ber.BitString, true)).slice(1);
}
// Serializes a raw EC public-key point into a base64 X.509/SPKI structure:
// SEQUENCE { SEQUENCE { OID id-ecPublicKey, OID secp384r1 }, BIT STRING key }.
// Inverse of readX509PublicKey above.
function writeX509PublicKey(key) {
  var writer = new Ber.Writer();
  writer.startSequence();
  writer.startSequence();
  writer.writeOID("1.2.840.10045.2.1"); // id-ecPublicKey
  writer.writeOID("1.3.132.0.34"); // secp384r1 curve
  writer.endSequence();
  // BIT STRING payload gets a leading 0x00 "unused bits" octet
  writer.writeBuffer(Buffer.concat([Buffer.from([0x00]), key]), Ber.BitString);
  writer.endSequence();
  return writer.buffer.toString("base64");
}
// End-to-end scratch test: ECDH against Mojang's secp384r1 key, a manual
// signature with our PEM-encoded ephemeral key, then a JWT sign/verify
// round-trip with an x5u header carrying our X.509 public key.
function test(pubKey = constants.PUBLIC_KEY) {
  const publicKey = readX509PublicKey(pubKey)
  const curve = 'secp384r1'
  const alice = crypto.createECDH(curve)
  const aliceKey = alice.generateKeys()
  // ec-pem wraps the ECDH key pair so it can be exported as PEM for signing
  const alicePEM = ec_pem(alice, curve)
  const alicePEMPrivate = alicePEM.encodePrivateKey()
  const alicePEMPublic = alicePEM.encodePublicKey()
  // Shared secret: our private key + Mojang's public point
  const aliceSecret = alice.computeSecret(publicKey, null, 'hex')
  console.log('Alice private key PEM', alicePEMPrivate)
  console.log('Alice public key PEM', alicePEMPublic)
  console.log('Alice public key', alice.getPublicKey('hex'))
  console.log('Alice secret key', aliceSecret)
  // Test signing manually
  const sign = crypto.createSign('RSA-SHA256')
  sign.write('🧂')
  sign.end()
  const sig = sign.sign(alicePEMPrivate, 'hex')
  console.log('Signature', sig)
  // Test JWT sign+verify
  const x509 = writeX509PublicKey(alice.getPublicKey())
  const token = JWT.sign({
    salt: 'HELLO',
    signedToken: alice.getPublicKey('base64')
  }, alicePEMPrivate, { algorithm: 'ES384', header: { x5u: x509 } })
  console.log('Encoded JWT', token)
  // send the jwt to the client...
  // NOTE(review): jsonwebtoken documents `algorithms` as an array — confirm
  // the string form is accepted.
  const verified = JWT.verify(token, alicePEMPublic, { algorithms: 'ES384' })
  console.log('Decoded JWT', verified)
  // Good
}
/**
* Alice private key PEM -----BEGIN EC PRIVATE KEY-----
MIGkAgEBBDBGgHZwH3BzieyJrdrVTVLmrEoUxpDUSqSzS98lobTXeUxJR/OmywPV
57I8YtnsJlCgBwYFK4EEACKhZANiAATjvTRgjsxKruO7XbduSQoHeR/6ouIm4Rmc
La9EkSpLFpuYZfsdtq9Vcf2t3Q3+jIbXjD/wNo97P4Hr5ghXG8sCVV7jpqadOF8j
SzyfajLGfX9mkS5WWLAg+dpi/KiEo/g=
-----END EC PRIVATE KEY-----
Alice public key PEM -----BEGIN PUBLIC KEY-----
MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE4700YI7MSq7ju123bkkKB3kf+qLiJuEZ
nC2vRJEqSxabmGX7HbavVXH9rd0N/oyG14w/8DaPez+B6+YIVxvLAlVe46amnThf
I0s8n2oyxn1/ZpEuVliwIPnaYvyohKP4
-----END PUBLIC KEY-----
Alice public key 04e3bd34608ecc4aaee3bb5db76e490a07791ffaa2e226e1199c2daf44912a4b169b9865fb1db6af5571fdaddd0dfe8c86d78c3ff0368f7b3f81ebe608571bcb02555ee3a6a69d385f234b3c9f6a32c67d7f66912e5658b020f9da62fca884a3f8
Alice secret key 76feb5d420b33907c4841a74baa707b717a29c021b17b6662fd46dba3227cac3e256eee9e890edb0308f66a3119b4914
Signature 3066023100d5ea70b8fc5e441c5e93d9f7dcde031f54291011c950a4aa8625ea9b27f7c798a8bc4de40baf35d487a05db6b5c628c6023100ae06cc2ea65db77138163c546ccf13933faae3d91bd6aa7108b99539cdb1c86f1e8a3704cb099f0b00eebed4ee75ccb2
Encoded JWT eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9.eyJzYWx0IjoiSEVMTE8iLCJzaWduZWRUb2tlbiI6IkJPTzlOR0NPekVxdTQ3dGR0MjVKQ2dkNUgvcWk0aWJoR1p3dHIwU1JLa3NXbTVobCt4MjJyMVZ4L2EzZERmNk1odGVNUC9BMmozcy9nZXZtQ0ZjYnl3SlZYdU9tcHAwNFh5TkxQSjlxTXNaOWYyYVJMbFpZc0NENTJtTDhxSVNqK0E9PSIsImlhdCI6MTYxMTc4MDYwNX0._g8k086U7nD-Tthn8jGWuuM3Q4EfhgqCfFA1Q5ePmjqhqMHOJvmrCz6tWsCytr2i-a2M51fb9K_YDAHbZ66Kos9ZkjF4Tqz5fPS880fM9woZ_1xjh7nGcOQ6sbY81zyi
Decoded JWT {
salt: 'HELLO',
signedToken: 'BOO9NGCOzEqu47tdt25JCgd5H/qi4ibhGZwtr0SRKksWm5hl+x22r1Vx/a3dDf6MhteMP/A2j3s/gevmCFcbywJVXuOmpp04XyNLPJ9qMsZ9f2aRLlZYsCD52mL8qISj+A==',
iat: 1611780605
}
*/
test()

49
src/auth/tests/jwtTest.js Normal file
View file

@ -0,0 +1,49 @@
// Verifies a captured Bedrock login chain: each JWT in the chain is verified
// against the previous token's embedded identityPublicKey, starting from
// Mojang's root public key, and the decoded claims are merged into one object.
function test() {
  const chain = require('./sampleChain.json').chain
  let data = {}
  // There are three JWT tokens sent to us, one signed by the client
  // one signed by Mojang with the Mojang token we have and another one
  // from Xbox with addition user profile data
  // We verify that at least one of the tokens in the chain has been properly
  // signed by Mojang by checking the x509 public key in the JWT headers
  let didVerify = false
  let pubKey = mcPubKeyToPem(constants.PUBLIC_KEY_NEW)
  console.log(pubKey)
  for (var token of chain) {
    // const decoded = jwt.decode(token, pubKey, 'ES384')
    console.log('Decoding...', token)
    const decoded = JWT.verify(token, pubKey, { algorithms: 'ES384' })
    console.log('Decoded...')
    console.log('Decoded', decoded)
    // Check if signed by Mojang key
    const [header] = token.split('.')
    const hdec = Buffer.from(header, 'base64').toString('utf-8')
    const hjson = JSON.parse(hdec)
    if (hjson.x5u == constants.PUBLIC_KEY && !data.extraData?.XUID) {
      didVerify = true
      console.log('verified with mojang key!', hjson.x5u)
    }
    // The next token is verified with this token's embedded public key
    pubKey = mcPubKeyToPem(decoded.identityPublicKey)
    data = { ...data, ...decoded }
  }
  console.log('Result', data)
}
// Decodes the clientData JWT from a captured login payload and dumps the
// decoded claims to clientData.json for inspection.
function test2() {
  const chain = require('./login.json')
  const token = chain.data.clientData
  // console.log(token)
  const pubKey = mcPubKeyToPem(constants.CDATA_PUBLIC_KEY)
  const decoded = JWT.verify(token, pubKey, { algorithms: 'ES384' })
  // console.log('Decoded', decoded)
  fs.writeFileSync('clientData.json', JSON.stringify(decoded))
}

168
src/client.js Normal file
View file

@ -0,0 +1,168 @@
const fs = require('fs')
const debug = require('debug')('minecraft-protocol')
const { Connection } = require('./connection')
const { createDeserializer, createSerializer } = require('./transforms/serializer')
const { Encrypt } = require('./auth/encryption')
const auth = require('./client/auth')
const Options = require('./options')
const { RakClient } = require('./Rak')
// High-level Bedrock client: owns the packet serializer/deserializer, drives
// the Microsoft/Xbox authentication flow, and exchanges packets with the
// server over RakNet. Inherits queuing/encryption plumbing from Connection.
class Client extends Connection {
  /**
   * @param {{ version: number, hostname: string, port: number }} options
   * Merges user options over defaults, installs encryption hooks, starts the
   * auth flow (password flow when options.password is set, device-code flow
   * otherwise), and connects once the auth flow emits 'session'.
   */
  constructor(options) {
    super()
    this.options = { ...Options.defaultOptions, ...options }
    this.serializer = createSerializer()
    this.deserializer = createDeserializer()
    this.validateOptions()
    Encrypt(this, null, options) // attaches encryption helpers to `this`
    if (options.password) {
      auth.authenticatePassword(this, options)
    } else {
      auth.authenticateDeviceCode(this, options)
    }
    this.on('session', this.connect) // fired by auth when profile is ready
    this.startQueue() // begin the outbound packet batching loop
    this.inLog = (...args) => console.info('C ->', ...args)
    this.outLog = (...args) => console.info('C <-', ...args)
    // this.on('decrypted', this.onDecryptedPacket)
  }

  // Throws when hostname/port are missing or the protocol version is too old.
  validateOptions() {
    // console.log('Options', this.options)
    if (!this.options.hostname || this.options.port == null) throw Error('Invalid hostname/port')
    if (this.options.version < Options.MIN_VERSION) {
      throw new Error(`Unsupported protocol version < ${Options.MIN_VERSION} : ${this.options.version}`)
    }
  }

  // RakNet callback: unwraps an encapsulated frame and feeds the raw bytes
  // to Connection.handle().
  onEncapsulated = (encapsulated, inetAddr) => {
    // log(inetAddr.address, ': Encapsulated', encapsulated)
    const buffer = Buffer.from(encapsulated.buffer)
    this.handle(buffer)
  }

  // Called on 'session' (auth complete): opens the RakNet transport and sends
  // the login packet once the transport reports connected.
  connect = async (sessionData) => {
    const hostname = this.options.hostname || '127.0.0.1'
    const port = this.options.port || 19132
    this.connection = new RakClient({ useWorkers: true, hostname, port })
    this.connection.onConnected = () => this.sendLogin()
    this.connection.onEncapsulated = this.onEncapsulated
    this.connection.connect()
  }

  // Builds and writes the 'login' packet containing the JWT chain
  // (our self-signed identity JWT plus the Mojang/Xbox tokens) and the
  // client user-data JWT.
  sendLogin() {
    this.createClientChain()
    const chain = [
      this.clientIdentityChain, // JWT we generated for auth
      ...this.accessToken // Mojang + Xbox JWT from auth
    ]
    const encodedChain = JSON.stringify({ chain })
    // const skinChain = JSON.stringify({})
    // NOTE(review): payload_size = user chain + identity chain + 8 (presumably
    // two 4-byte length prefixes) — confirm against the wire format.
    const bodyLength = this.clientUserChain.length + encodedChain.length + 8
    debug('Auth chain', chain)
    this.write('login', {
      protocol_version: this.options.version,
      payload_size: bodyLength,
      chain: encodedChain,
      client_data: this.clientUserChain
    })
    this.emit('loggingIn')
  }

  // Server asked us to disconnect; log the reason and terminate the process.
  onDisconnectRequest(packet) {
    // We're talking over UDP, so there is no connection to close, instead
    // we stop communicating with the server
    console.warn(`Server requested ${packet.hide_disconnect_reason ? 'silent disconnect' : 'disconnect'}: ${packet.message}`)
    process.exit(1)
  }

  close() {
    console.warn('Close not implemented!!')
  }

  // Debug aid: re-encodes a just-decoded packet and checks the bytes
  // round-trip. On mismatch, dumps ours/theirs hex + JSON to disk and throws.
  tryRencode(name, params, actual) {
    const packet = this.serializer.createPacketBuffer({ name, params })
    console.assert(packet.toString('hex') == actual.toString('hex'))
    if (packet.toString('hex') !== actual.toString('hex')) {
      const ours = packet.toString('hex').match(/.{1,16}/g).join('\n')
      const theirs = actual.toString('hex').match(/.{1,16}/g).join('\n')
      fs.writeFileSync('ours.txt', ours)
      fs.writeFileSync('theirs.txt', theirs)
      fs.writeFileSync('ours.json', serialize(params))
      fs.writeFileSync('theirs.json', serialize(this.deserializer.parsePacketBuffer(packet).data.params))
      throw new Error(name + ' Packet comparison failed!')
    }
  }

  // Decodes one MCPE packet buffer, runs round-trip verification + debug
  // dumping, handles a few packets specially, then re-emits every packet by
  // name so external listeners can subscribe.
  readPacket(packet) {
    // console.log('packet', packet)
    const des = this.deserializer.parsePacketBuffer(packet)
    const pakData = { name: des.data.name, params: des.data.params }
    this.inLog('-> C', pakData.name/*, serialize(pakData.params).slice(0, 100)*/)
    // No idea what this exotic 0xA0 packet is, it's not implemented anywhere
    // and seems empty. Possible gibberish from the raknet impl
    if (pakData.name == '160' || !pakData.name) { // eslint-ignore-line
      console.warn('?? Ignoring extraneous packet ', des)
      return
    }
    // Packet verifying (decode + re-encode + match test)
    if (pakData.name) {
      this.tryRencode(pakData.name, pakData.params, packet)
    }
    // console.info('->', JSON.stringify(pakData, (k,v) => typeof v == 'bigint' ? v.toString() : v))
    // Packet dumping: save first occurrence of each packet type for debugging
    try {
      if (!fs.existsSync(`./packets/${pakData.name}.json`)) {
        fs.writeFileSync(`./packets/${pakData.name}.json`, serialize(pakData.params, 2))
        fs.writeFileSync(`./packets/${pakData.name}.txt`, packet.toString('hex'))
      }
    } catch { }
    switch (des.data.name) {
      case 'server_to_client_handshake':
        this.emit('client.server_handshake', des.data.params)
        break
      case 'disconnect': // Client kicked
        this.onDisconnectRequest(des.data.params)
        break
      case 'crafting_data':
        fs.writeFileSync('crafting.json', JSON.stringify(des.data.params, (k, v) => typeof v == 'bigint' ? v.toString() : v))
        break
      case 'start_game':
        fs.writeFileSync('start_game.json', JSON.stringify(des.data.params, (k, v) => typeof v == 'bigint' ? v.toString() : v))
        break
      case 'level_chunk':
        // fs.writeFileSync(`./chunks/chunk-${chunks++}.txt`, packet.toString('hex'))
        break
      default:
        // console.log('Sending to listeners')
    }
    this.emit(des.data.name, des.data.params)
  }
}
var chunks = 0;
// JSON-stringify helper that survives BigInt values (JSON.stringify throws on
// them natively) by converting them to decimal strings. `fmt` is passed
// through as the indentation argument.
function serialize(obj = {}, fmt) {
  const bigintSafe = (key, value) => (typeof value === 'bigint' ? value.toString() : value)
  return JSON.stringify(obj, bigintSafe, fmt)
}
module.exports = { Client }

103
src/client/auth.js Normal file
View file

@ -0,0 +1,103 @@
const XboxLiveAuth = require('@xboxreplay/xboxlive-auth')
const debug = require('debug')('minecraft-protocol')
const fetch = require('node-fetch')
const authConstants = require('./authConstants')
const { MsAuthFlow } = require('./authFlow.js')
// Default headers for JSON requests to the auth services.
const getFetchOptions = {
  headers: {
    'Content-Type': 'application/json',
    'User-Agent': 'node-minecraft-protocol'
  }
}
/**
 * Obtains Minecaft profile data using a Minecraft access token and starts the join sequence
 * @param {object} client - The client passed to protocol
 * @param {object} options - Client Options
 * @param {string} chains - Minecraft JWTs to send to server
 */
async function postAuthenticate (client, options, chains) {
  // First chain is Mojang stuff, second is Xbox profile data used by mc.
  // Decode the Xbox JWT's payload (middle segment) without verification.
  const xboxJwt = chains[1]
  const payloadSegment = xboxJwt.split('.')[1]
  const xboxProfile = JSON.parse(Buffer.from(payloadSegment, 'base64').toString())

  // This profile / session here could be simplified down to where it just passes the uuid of the player to encrypt.js
  // That way you could remove some lines of code. It accesses client.session.selectedProfile.id so /shrug.
  // - Kashalls
  const extraData = xboxProfile?.extraData
  const profile = {
    name: extraData?.displayName || 'Player',
    uuid: extraData?.identity || 'adfcf5ca-206c-404a-aec4-f59fff264c9b', //random
    xuid: extraData?.XUID || 0
  }

  client.profile = profile
  client.username = profile.name
  client.accessToken = chains
  client.emit('session', profile)
}
/**
 * Authenticates with Mincrosoft through user credentials, then
 * with Xbox Live, Minecraft, checks entitlements and returns profile
 *
 * @function
 * @param {object} client - The client passed to protocol
 * @param {object} options - Client Options
 * @throws {Error} always — the password flow is not implemented yet
 */
async function authenticatePassword (client, options) {
  throw new Error('Not implemented')
}
/**
 * Authenticates to Minecraft via device code based Microsoft auth,
 * then connects to the specified server in Client Options
 *
 * @function
 * @param {object} client - The client passed to protocol
 * @param {object} options - Client Options
 */
async function authenticateDeviceCode (client, options) {
  try {
    const authFlow = new MsAuthFlow(options.username, options.profilesFolder, options.onMsaCode)
    const chains = await authFlow.getMinecraftToken(client.clientX509)
    await postAuthenticate(client, options, chains)
  } catch (err) {
    // Surface failures both on the console and as a client 'error' event
    console.error(err)
    client.emit('error', err)
  }
}
// Returns the parsed JSON body for successful (2xx) fetch responses,
// throws with the HTTP status text otherwise.
function checkStatus (res) {
  if (!res.ok) { // res.ok ⇔ status in [200, 300)
    throw Error(res.statusText)
  }
  return res.json()
}
module.exports = {
authenticatePassword,
authenticateDeviceCode
}
// Manual smoke test for the device-code flow, using a stub client that just
// logs the events it receives. Run this file directly to exercise it.
async function msaTest () {
  // MsAuthFlow.resetTokenCaches()
  const stubClient = {
    connect (...args) {
      console.log('Connecting', args)
    },
    emit (...e) {
      console.log('Event', e)
    }
  }
  await authenticateDeviceCode(stubClient, {})
}
// debug with node microsoftAuth.js
if (!module.parent) {
msaTest()
}

View file

@ -0,0 +1,4 @@
module.exports = {
  // Audience ("relying party") requested when minting the XSTS token
  XSTSRelyingParty: 'https://multiplayer.minecraft.net/',
  // Bedrock multiplayer session-auth endpoint (exchanges XSTS for the JWT chain)
  MinecraftAuth: 'https://multiplayer.minecraft.net/authentication'
}

134
src/client/authFlow.js Normal file
View file

@ -0,0 +1,134 @@
const crypto = require('crypto')
const path = require('path')
const fs = require('fs')
const debug = require('debug')('minecraft-protocol')
const mcDefaultFolderPath = require('minecraft-folder-path')
const authConstants = require('./authConstants')
const { MsaTokenManager, XboxTokenManager, MinecraftTokenManager } = require('./tokens')
// Initialize msal
// Docs: https://github.com/AzureAD/microsoft-authentication-library-for-js/blob/dev/lib/msal-common/docs/request.md#public-apis-1
const msalConfig = {
  auth: {
    // the minecraft client:
    // clientId: "000000004C12AE6F",
    clientId: '389b1b32-b5d5-43b2-bddc-84ce938d6737', // token from https://github.com/microsoft/Office365APIEditor
    // '/consumers' authority restricts sign-in to personal Microsoft accounts
    authority: 'https://login.microsoftonline.com/consumers',
  }
}
// Runs `methodFn` up to `attempts` times, calling `beforeRetry` between
// failed attempts. The final attempt is NOT wrapped in try/catch, so its
// error (if any) propagates to the caller.
async function retry (methodFn, beforeRetry, attempts) {
  while (attempts--) {
    if (attempts === 0) {
      return await methodFn()
    }
    try {
      return await methodFn()
    } catch (e) {
      debug(e)
    }
    await beforeRetry()
  }
}
// Orchestrates the three-layer auth chain: Microsoft account (MSA) ->
// Xbox Live (user token + XSTS) -> Minecraft session token chain.
// Each layer caches its tokens on disk under <cacheDir>/nmp-cache.
class MsAuthFlow {
  constructor (username, cacheDir, codeCallback) {
    this.initTokenCaches(username, cacheDir)
    // Invoked with the device-code response so callers can show the user code
    this.codeCallback = codeCallback
  }

  // Creates the cache dir (falling back to the module dir on failure) and one
  // token manager per auth layer. Cache files are namespaced by a 6-char hash
  // of the username so multiple accounts can coexist.
  initTokenCaches (username, cacheDir) {
    const hash = sha1(username).substr(0, 6)

    let cachePath = cacheDir || mcDefaultFolderPath
    try {
      if (!fs.existsSync(cachePath + '/nmp-cache')) {
        fs.mkdirSync(cachePath + '/nmp-cache')
      }
      cachePath += '/nmp-cache'
    } catch (e) {
      console.log('Failed to open cache dir', e)
      cachePath = __dirname
    }

    const cachePaths = {
      msa: path.join(cachePath, `./${hash}_msa-cache.json`),
      xbl: path.join(cachePath, `./${hash}_xbl-cache.json`),
      bed: path.join(cachePath, `./${hash}_bed-cache.json`)
    }

    const scopes = ['XboxLive.signin', 'offline_access']
    this.msa = new MsaTokenManager(msalConfig, scopes, cachePaths.msa)
    this.xbl = new XboxTokenManager(authConstants.XSTSRelyingParty, cachePaths.xbl)
    // NOTE(review): MinecraftTokenManager's constructor signature is
    // (clientPublicKey, cacheLocation) — the cache path is passed as the first
    // argument here, so the default cache location is used. Confirm intended.
    this.mca = new MinecraftTokenManager(cachePaths.bed)
  }

  // Deletes the whole nmp-cache directory. Returns true on success, false on
  // error; returns undefined when the directory did not exist.
  static resetTokenCaches (cacheDir) {
    let cachePath = cacheDir || mcDefaultFolderPath
    try {
      if (fs.existsSync(cachePath + '/nmp-cache')) {
        cachePath += '/nmp-cache'
        fs.rmdirSync(cachePath, { recursive: true })
        return true
      }
    } catch (e) {
      console.log('Failed to clear cache dir', e)
      return false
    }
  }

  // MSA access token: cached if still valid, otherwise runs the interactive
  // device-code sign-in (prompting via codeCallback when provided).
  async getMsaToken () {
    if (await this.msa.verifyTokens()) {
      debug('[msa] Using existing tokens')
      return this.msa.getAccessToken().token
    } else {
      debug('[msa] No valid cached tokens, need to sign in')
      const ret = await this.msa.authDeviceCode((response) => {
        console.info('[msa] First time signing in. Please authenticate now:')
        console.info(response.message)
        if (this.codeCallback) this.codeCallback(response)
      })
      console.info(`[msa] Signed in as ${ret.account.username}`)
      debug('[msa] got auth result', ret)
      return ret.accessToken
    }
  }

  // XSTS token: cached if valid, otherwise derives MSA -> user token -> XSTS,
  // retrying once with a forced MSA refresh on failure.
  async getXboxToken () {
    if (await this.xbl.verifyTokens()) {
      debug('[xbl] Using existing tokens')
      return this.xbl.getCachedXstsToken().data
    } else {
      debug('[xbl] Need to obtain tokens')
      return await retry(async () => {
        const msaToken = await this.getMsaToken()
        const ut = await this.xbl.getUserToken(msaToken)
        const xsts = await this.xbl.getXSTSToken(ut)
        return xsts
      }, () => { this.msa.forceRefresh = true }, 2)
    }
  }

  // Minecraft JWT chain for the given client public key. Retries once with a
  // forced Xbox-token refresh on failure.
  async getMinecraftToken (publicKey) {
    // TODO: Fix cache, in order to do cache we also need to cache the ECDH keys so disable it
    // is this even a good idea to cache?
    // NOTE(review): `&& false` deliberately disables the cache branch below.
    if (await this.mca.verifyTokens() && false) {
      debug('[mc] Using existing tokens')
      return this.mca.getCachedAccessToken().chain
    } else {
      if (!publicKey) throw new Error('Need to specifiy a ECDH x509 URL encoded public key')
      debug('[mc] Need to obtain tokens')
      return await retry(async () => {
        const xsts = await this.getXboxToken()
        debug('[xbl] xsts data', xsts)
        const token = await this.mca.getAccessToken(publicKey, xsts)
        return token.chain
      }, () => { this.xbl.forceRefresh = true }, 2)
    }
  }
}
// Hex-encoded SHA-1 digest of `data`; falsy input hashes the empty string.
function sha1 (data) {
  const hash = crypto.createHash('sha1')
  hash.update(data || '', 'binary')
  return hash.digest('hex')
}
module.exports = { MsAuthFlow }

299
src/client/tokens.js Normal file
View file

@ -0,0 +1,299 @@
const msal = require('@azure/msal-node')
const XboxLiveAuth = require('@xboxreplay/xboxlive-auth')
const debug = require('debug')('minecraft-protocol')
const fs = require('fs')
const path = require('path')
const fetch = require('node-fetch')
const authConstants = require('./authConstants')
// Manages Microsoft account tokens
// Wraps MSAL's device-code flow and persists MSAL's token cache to a JSON
// file so a sign-in survives restarts. Token lookups are filtered by the
// configured MSA client id.
class MsaTokenManager {
  constructor(msalConfig, scopes, cacheLocation) {
    this.msaClientId = msalConfig.auth.clientId
    this.scopes = scopes
    this.cacheLocation = cacheLocation || path.join(__dirname, './msa-cache.json')

    try {
      // require() doubles as a JSON loader; throws if the file doesn't exist
      this.msaCache = require(this.cacheLocation)
    } catch (e) {
      this.msaCache = {}
      fs.writeFileSync(this.cacheLocation, JSON.stringify(this.msaCache))
    }

    // MSAL cache plugin: keeps MSAL's in-memory token cache in sync with our file
    const beforeCacheAccess = async (cacheContext) => {
      cacheContext.tokenCache.deserialize(await fs.promises.readFile(this.cacheLocation, 'utf-8'))
    }

    const afterCacheAccess = async (cacheContext) => {
      if (cacheContext.cacheHasChanged) {
        await fs.promises.writeFile(this.cacheLocation, cacheContext.tokenCache.serialize())
      }
    }

    const cachePlugin = {
      beforeCacheAccess,
      afterCacheAccess
    }

    msalConfig.cache = {
      cachePlugin
    }
    this.msalApp = new msal.PublicClientApplication(msalConfig)
    this.msalConfig = msalConfig
  }

  // All cached MSA accounts (empty array when nothing is cached yet).
  getUsers() {
    const accounts = this.msaCache.Account
    const users = []
    if (!accounts) return users
    for (const account of Object.values(accounts)) {
      users.push(account)
    }
    return users
  }

  // Cached access token for our client id, with validity info.
  // `valid` is true while more than 1 second of lifetime remains.
  getAccessToken() {
    const tokens = this.msaCache.AccessToken
    if (!tokens) return
    const account = Object.values(tokens).filter(t => t.client_id === this.msaClientId)[0]
    if (!account) {
      debug('[msa] No valid access token found', tokens)
      return
    }
    // expires_on is in epoch seconds; `until` is remaining milliseconds
    const until = new Date(account.expires_on * 1000) - Date.now()
    const valid = until > 1000
    return { valid, until: until, token: account.secret }
  }

  // Cached refresh token for our client id, if any.
  getRefreshToken() {
    const tokens = this.msaCache.RefreshToken
    if (!tokens) return
    const account = Object.values(tokens).filter(t => t.client_id === this.msaClientId)[0]
    if (!account) {
      debug('[msa] No valid refresh token found', tokens)
      return
    }
    return { token: account.secret }
  }

  // Exchanges the cached refresh token for fresh tokens via MSAL.
  // Rejects when no refresh token is cached.
  async refreshTokens() {
    const rtoken = this.getRefreshToken()
    if (!rtoken) {
      throw new Error('Cannot refresh without refresh token')
    }
    const refreshTokenRequest = {
      refreshToken: rtoken.token,
      scopes: this.scopes
    }

    return new Promise((resolve, reject) => {
      this.msalApp.acquireTokenByRefreshToken(refreshTokenRequest).then((response) => {
        debug('[msa] refreshed token', JSON.stringify(response))
        resolve(response)
      }).catch((error) => {
        debug('[msa] failed to refresh', JSON.stringify(error))
        reject(error)
      })
    })
  }

  // True when a usable access token exists (refreshing it when expired).
  // Setting this.forceRefresh externally invalidates the cached tokens.
  async verifyTokens() {
    const at = this.getAccessToken()
    const rt = this.getRefreshToken()
    if (!at || !rt || this.forceRefresh) {
      return false
    }
    debug('[msa] have at, rt', at, rt)
    if (at.valid && rt) {
      return true
    } else {
      try {
        await this.refreshTokens()
        return true
      } catch (e) {
        return false
      }
    }
  }

  // Authenticate with device_code flow
  // dataCallback receives MSAL's device-code response (user code + verify URL).
  async authDeviceCode(dataCallback) {
    const deviceCodeRequest = {
      deviceCodeCallback: (resp) => {
        debug('[msa] device_code response: ', resp)
        dataCallback(resp)
      },
      scopes: this.scopes
    }

    return new Promise((resolve, reject) => {
      this.msalApp.acquireTokenByDeviceCode(deviceCodeRequest).then((response) => {
        debug('[msa] device_code resp', JSON.stringify(response))
        // Seed the Account cache so getUsers() works right after first sign-in
        if (!this.msaCache.Account) this.msaCache.Account = { '': response.account }
        resolve(response)
      }).catch((error) => {
        console.warn('[msa] Error getting device code')
        console.debug(JSON.stringify(error))
        reject(error)
      })
    })
  }
}
// Manages Xbox Live tokens for xboxlive.com
// Two token layers are cached in one JSON file: the Xbox "user token"
// (obtained from an MSA access token) and the XSTS token (scoped to the
// configured relying party).
class XboxTokenManager {
  constructor(relyingParty, cacheLocation) {
    this.relyingParty = relyingParty
    this.cacheLocation = cacheLocation || path.join(__dirname, './xbl-cache.json')
    try {
      // require() doubles as a JSON loader; throws if the file doesn't exist
      this.cache = require(this.cacheLocation)
    } catch (e) {
      this.cache = {}
    }
  }

  // Cached user token; `valid` while more than 1s remains before NotAfter.
  getCachedUserToken() {
    const token = this.cache.userToken
    if (!token) return
    const until = new Date(token.NotAfter)
    const dn = Date.now()
    const remainingMs = until - dn
    const valid = remainingMs > 1000
    return { valid, token: token.Token, data: token }
  }

  // Cached XSTS token; `valid` while more than 1s remains before expiresOn.
  getCachedXstsToken() {
    const token = this.cache.xstsToken
    if (!token) return
    const until = new Date(token.expiresOn)
    const dn = Date.now()
    const remainingMs = until - dn
    const valid = remainingMs > 1000
    return { valid, token: token.XSTSToken, data: token }
  }

  // Persist the user token to the cache file.
  setCachedUserToken(data) {
    this.cache.userToken = data
    fs.writeFileSync(this.cacheLocation, JSON.stringify(this.cache))
  }

  // Persist the XSTS token to the cache file.
  setCachedXstsToken(data) {
    this.cache.xstsToken = data
    fs.writeFileSync(this.cacheLocation, JSON.stringify(this.cache))
  }

  // True when a valid XSTS token exists, or can be re-derived from a still
  // valid user token. this.forceRefresh invalidates the cache for one call.
  async verifyTokens() {
    const ut = this.getCachedUserToken()
    const xt = this.getCachedXstsToken()
    if (!ut || !xt || this.forceRefresh) {
      return false
    }
    debug('[xbl] have user, xsts', ut, xt)
    if (ut.valid && xt.valid) {
      return true
    } else if (ut.valid && !xt.valid) {
      try {
        await this.getXSTSToken(ut.data)
        return true
      } catch (e) {
        return false
      }
    }
    return false
  }

  // Exchanges an MSA access token (as a "d="-prefixed RPS ticket) for an
  // Xbox Live user token and caches it.
  async getUserToken(msaAccessToken) {
    debug('[xbl] obtaining xbox token with ms token', msaAccessToken)
    if (!msaAccessToken.startsWith('d=')) { msaAccessToken = 'd=' + msaAccessToken }
    const xblUserToken = await XboxLiveAuth.exchangeRpsTicketForUserToken(msaAccessToken)
    this.setCachedUserToken(xblUserToken)
    debug('[xbl] user token:', xblUserToken)
    return xblUserToken
  }

  // Exchanges an Xbox user token for an XSTS identity scoped to our relying
  // party and caches it.
  async getXSTSToken(xblUserToken) {
    debug('[xbl] obtaining xsts token with xbox user token', xblUserToken.Token)
    const xsts = await XboxLiveAuth.exchangeUserTokenForXSTSIdentity(
      xblUserToken.Token, { XSTSRelyingParty: this.relyingParty, raw: false }
    )
    this.setCachedXstsToken(xsts)
    debug('[xbl] xsts', xsts)
    return xsts
  }
}
// Manages Minecraft tokens for sessionserver.mojang.com
// Caches the JWT chain returned by the Bedrock multiplayer auth endpoint.
class MinecraftTokenManager {
  // NOTE(review): authFlow.js constructs this as
  // `new MinecraftTokenManager(cachePaths.bed)`, so the cache path lands in
  // `clientPublicKey` and the default cacheLocation is used — confirm intended.
  constructor(clientPublicKey, cacheLocation) {
    this.clientPublicKey = clientPublicKey
    this.cacheLocation = cacheLocation || path.join(__dirname, './bed-cache.json')
    try {
      // require() doubles as a JSON loader; throws if the file doesn't exist
      this.cache = require(this.cacheLocation)
    } catch (e) {
      this.cache = {}
    }
  }

  // Cached JWT chain with validity derived from the first JWT's `exp` claim
  // (valid while more than 1s of lifetime remains).
  getCachedAccessToken() {
    const token = this.cache.mca
    debug('[mc] token cache', this.cache)
    if (!token) return
    console.log('TOKEN', token) // NOTE(review): leftover debug logging
    const jwt = token.chain[0]
    const [header, payload, signature] = jwt.split('.').map(k => Buffer.from(k, 'base64'))

    const body = JSON.parse(String(payload))
    const expires = new Date(body.exp * 1000)
    const remainingMs = expires - Date.now()
    const valid = remainingMs > 1000
    return { valid, until: expires, chain: token.chain }
  }

  // Persist the auth response (stamped with the fetch time) to the cache file.
  setCachedAccessToken(data) {
    data.obtainedOn = Date.now()
    this.cache.mca = data
    fs.writeFileSync(this.cacheLocation, JSON.stringify(this.cache))
  }

  // True when a still-valid cached chain exists and no refresh was forced.
  async verifyTokens() {
    const at = this.getCachedAccessToken()
    if (!at || this.forceRefresh) {
      return false
    }
    debug('[mc] have user access token', at)
    if (at.valid) {
      return true
    }
    return false
  }

  // POSTs our ECDH public key with the XSTS authorization header to the
  // Bedrock auth endpoint, caches and returns the resulting JWT chain.
  async getAccessToken(clientPublicKey, xsts) {
    debug('[mc] authing to minecraft', clientPublicKey, xsts)
    const getFetchOptions = {
      headers: {
        'Content-Type': 'application/json',
        'User-Agent': 'node-minecraft-protocol',
        'Authorization': `XBL3.0 x=${xsts.userHash};${xsts.XSTSToken}`
      }
    }
    const MineServicesResponse = await fetch(authConstants.MinecraftAuth, {
      method: 'post',
      ...getFetchOptions,
      body: JSON.stringify({ identityPublicKey: clientPublicKey })
    }).then(checkStatus)

    debug('[mc] mc auth response', MineServicesResponse)
    this.setCachedAccessToken(MineServicesResponse)
    return MineServicesResponse
  }
}
// Parse the JSON body of a successful fetch response; otherwise raise the
// HTTP status text as an Error.
function checkStatus(res) {
  if (res.ok) return res.json() // ok ⇔ status 200-299
  throw Error(res.statusText)
}
module.exports = { MsaTokenManager, XboxTokenManager, MinecraftTokenManager }

177
src/connection.js Normal file
View file

@ -0,0 +1,177 @@
const BinaryStream = require('@jsprismarine/jsbinaryutils').default
const BatchPacket = require('./datatypes/BatchPacket')
const cipher = require('./transforms/encryption')
const { EventEmitter } = require('events')
const Reliability = require('jsp-raknet/protocol/reliability')
const debug = require('debug')('minecraft-protocol')
// Shared transport layer for client and server: batches outbound MCPE packets
// into BatchPackets, optionally encrypts them, and unwraps/decrypts inbound
// 0xfe-framed buffers before handing individual packets to readPacket()
// (implemented by the subclass).
class Connection extends EventEmitter {
  // Enables AES encryption for both directions using the negotiated shared
  // secret (set by the auth/encryption layer) and the given IV.
  startEncryption(iv) {
    this.encryptionEnabled = true
    this.inLog('Started encryption', this.sharedSecret, iv)
    this.decrypt = cipher.createDecryptor(this, iv)
    this.encrypt = cipher.createEncryptor(this, iv)
    this.q2 = []
  }

  // Serializes one packet and sends it immediately in its own batch,
  // bypassing the tick queue.
  write(name, params) { // TODO: Batch
    // console.log('Need to encode', name, params)
    // Log prefix: 'C' when this side has a connect() (client), else 'S'
    var s = this.connect ? 'C' : 'S'
    if (this.downQ) s += 'P'
    this.outLog('NB <- ' + s, name,params)
    const batch = new BatchPacket()
    const packet = this.serializer.createPacketBuffer({ name, params })
    // console.log('Sending buf', packet.toString('hex').)
    batch.addEncodedPacket(packet)

    if (this.encryptionEnabled) {
      this.sendEncryptedBatch(batch)
    } else {
      this.sendDecryptedBatch(batch)
    }
  }

  // Serializes a packet and appends it to the tick queue; chunk/cache packets
  // skip the queue and are sent immediately.
  queue(name, params) {
    this.outLog('Q <- ', name, params)
    const packet = this.serializer.createPacketBuffer({ name, params })
    if (name == 'level_chunk' || name=='client_cache_blob_status' || name == 'client_cache_miss_response') {
      // Skip queue, send ASAP
      this.sendBuffer(packet)
      return
    }
    this.q.push(packet)
    this.q2.push(name)
  }

  // Starts the 20ms flush loop: drains queued packets into one batch per tick.
  startQueue() {
    this.q = []
    this.loop = setInterval(() => {
      if (this.q.length) {
        //TODO: can we just build Batch before the queue loop?
        const batch = new BatchPacket()
        this.outLog('<- BATCH', this.q2)
        // NOTE(review): because shift() shrinks q while i grows, this loop
        // drains only about half the queue per tick — the rest is held for
        // the next tick. Confirm this throttling is intended.
        const sending = []
        for (let i = 0; i < this.q.length; i++) {
          const packet = this.q.shift()
          sending.push(this.q2.shift())
          batch.addEncodedPacket(packet)
        }
        // this.outLog('~~ Sending', sending)
        if (this.encryptionEnabled) {
          this.sendEncryptedBatch(batch)
        } else {
          this.sendDecryptedBatch(batch)
        }
        // this.q2 = []
      }
    }, 20)
  }

  // Sends a pre-encoded payload without running it through protodef.
  writeRaw(name, buffer) { // skip protodef serializaion
    // temporary hard coded stuff
    const batch = new BatchPacket()
    if (name == 'biome_definition_list') {
      // so we can send nbt straight from file without parsing
      const stream = new BinaryStream()
      stream.writeUnsignedVarInt(0x7a) // biome_definition_list packet id
      stream.append(buffer)
      batch.addEncodedPacket(stream.getBuffer())
    }
    if (this.encryptionEnabled) {
      this.sendEncryptedBatch(batch)
    } else {
      this.sendDecryptedBatch(batch)
    }
  }

  /**
   * Sends a MCPE packet buffer
   * With immediate=true the buffer goes out in its own batch right away;
   * otherwise it joins the tick queue.
   */
  sendBuffer(buffer, immediate = false) {
    if (immediate) {
      const batch = new BatchPacket()
      batch.addEncodedPacket(buffer)
      if (this.encryptionEnabled) {
        this.sendEncryptedBatch(batch)
      } else {
        this.sendDecryptedBatch(batch)
      }
    } else {
      this.q.push(buffer)
      this.q2.push('rawBuffer')
    }
  }

  // Plaintext path: encode the batch and hand it to RakNet.
  sendDecryptedBatch(batch) {
    const buf = batch.encode()
    // send to raknet
    this.sendMCPE(buf, true)
  }

  // Encrypted path: the cipher calls onEncryptedPacket when done.
  sendEncryptedBatch(batch) {
    const buf = batch.stream.getBuffer()
    debug('Sending encrypted batch', batch)
    this.encrypt(buf)
  }

  // TODO: Rename this to sendEncapsulated
  sendMCPE(buffer, immediate) {
    this.connection.sendReliable(buffer, immediate)
    // if (this.worker) {
    //   this.outLog('-> buf', buffer)
    //   this.worker.postMessage({ type: 'queueEncapsulated', packet: buffer, immediate })
    // } else {
    //   const sendPacket = new EncapsulatedPacket()
    //   sendPacket.reliability = Reliability.ReliableOrdered
    //   sendPacket.buffer = buffer
    //   this.connection.addEncapsulatedToQueue(sendPacket)
    //   if (immediate) this.connection.sendQueue()
    // }
  }

  // These are callbacks called from encryption.js
  // Prefix the encrypted payload with the 0xfe wrapper byte and ship it.
  onEncryptedPacket = (buf) => {
    this.outLog('ENC BUF', buf)
    const packet = Buffer.concat([Buffer.from([0xfe]), buf]) // add header
    this.outLog('Sending wrapped encrypted batch', packet)
    this.sendMCPE(packet)
  }

  // Decrypted batch payload: split into packets and dispatch each one.
  onDecryptedPacket = (buf) => {
    // console.log('🟢 Decrypted', buf)
    const stream = new BinaryStream(buf)
    const packets = BatchPacket.getPackets(stream)
    for (const packet of packets) {
      this.readPacket(packet)
    }
  }

  // Entry point for raw inbound buffers from RakNet; only 0xfe-wrapped
  // payloads are processed (decrypted or decoded, then dispatched).
  handle(buffer) { // handle encapsulated
    if (buffer[0] == 0xfe) { // wrapper
      if (this.encryptionEnabled) {
        this.decrypt(buffer.slice(1))
      } else {
        // NOTE(review): the stream here still includes the 0xfe header byte —
        // presumably BatchPacket.decode() skips it; confirm.
        const stream = new BinaryStream(buffer)
        const batch = new BatchPacket(stream)
        batch.decode()
        const packets = batch.getPackets()
        this.inLog('Reading ', packets.length, 'packets')
        for (var packet of packets) {
          this.readPacket(packet)
        }
      }
    }
    // console.log('[client] handled incoming ', buffer)
  }
}
/** JSON.stringify wrapper that renders BigInt values as decimal strings. */
function serialize(obj = {}, fmt) {
  const bigintSafe = (key, value) => (typeof value === 'bigint' ? value.toString() : value)
  return JSON.stringify(obj, bigintSafe, fmt)
}
module.exports = { Connection }

View file

@ -1,61 +0,0 @@
'use strict';
const assert = require('assert');
const raknet = require('raknet');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const ProtoDef = require('protodef').ProtoDef;
const batchProto=new ProtoDef();
batchProto.addTypes(require("./datatypes/minecraft"));
batchProto.addType("insideBatch",["endOfArray",{"type":["buffer",{"countType":"i32"}]}]);
/**
 * Creates a legacy Minecraft PE 0.14 client backed by a raknet connection.
 * Fixes: replaced the deprecated `new Buffer(...)` constructor with
 * Buffer.from, and removed unused `port`/`host` locals (raknet reads them
 * straight from `options`).
 * @param {object} options - must include `username`; `host`/`port` optional
 * @returns {object} raknet client with `writeMCPE` and per-packet events
 */
function createClient(options) {
  assert.ok(options, 'options is required');
  assert.ok(options.username, 'username is required');
  options.customPackets = require('minecraft-data')('pe_0.14').protocol;
  options.customTypes = require('./datatypes/minecraft');
  const client = raknet.createClient(options);
  client.username = options.username;
  // Re-emit decoded MCPE packets under their own packet names
  client.on('mcpe', packet => client.emit(packet.name, packet.params));
  client.writeMCPE = (name, packet) => {
    client.writeEncapsulated('mcpe', {
      name: name,
      params: packet
    });
  };
  client.on('login', function() {
    client.writeMCPE('game_login',
      {
        username: client.username,
        protocol: 70,
        protocol2: 70,
        client_id: [ -1, -697896776 ],
        client_uuid: '86372ed8-d055-b23a-9171-5e3ac594d766',
        server_address: client.host + ':' + client.port,
        client_secret: Buffer.from('e8 88 db 7b 9f f2 f0 44 a3 51 08 18 4e 8c 7f 9a'.replace(/ /g, ''), 'hex'),
        skin:
        {
          skinType: 'Standard_Steve',
          texture: fs.readFileSync(path.join(__dirname, 'texture'))
        }
      }
    );
  });
  // Inflate a compressed 'batch' payload and replay each inner packet
  client.on('batch', function(packet) {
    const buf = zlib.inflateSync(packet.payload);
    const packets = batchProto.parsePacketBuffer('insideBatch', buf).data;
    packets.forEach(p => client.readEncapsulatedPacket(Buffer.concat([Buffer.from([0x8e]), p])));
  });
  return client;
}
module.exports = createClient;

View file

@ -1,47 +0,0 @@
const raknet = require('raknet');
const zlib = require('zlib');
const ProtoDef = require('protodef').ProtoDef;
const batchProto=new ProtoDef();
batchProto.addTypes(require("./datatypes/minecraft"));
batchProto.addType("insideBatch",["endOfArray",{"type":["buffer",{"countType":"i32"}]}]);
/**
 * Creates a legacy Minecraft PE 0.14 server on top of raknet.
 * Fixes: removed the `port`/`host` locals that were computed but never
 * used (raknet.createServer reads them from `options` directly).
 * @param {object} [options] - supports `name`, `motd`, `max-players` plus
 *   anything raknet.createServer accepts
 * @returns {object} the raknet server with MCPE helpers attached
 */
function createServer(options) {
  options = options || {};
  // protodef packet/type definitions for PE 0.14
  options.customPackets = require('minecraft-data')('pe_0.14').protocol;
  options.customTypes = require("./datatypes/minecraft");
  const server = raknet.createServer(options);
  server.name = options.name || "Minecraft Server";
  server.motd = options.motd || "A Minecraft server";
  server.maxPlayers = options['max-players'] || 20;
  server.playerCount = 0;
  server.on("connection", function (client) {
    // Re-emit decoded MCPE packets under their own packet names
    client.on("mcpe", packet => client.emit(packet.name, packet.params));
    client.writeMCPE = (name, packet) => {
      client.writeEncapsulated("mcpe", {
        name: name,
        params: packet
      });
    };
    // Deflate several serialized packets into one 'batch' packet
    client.writeBatch = function (packets) {
      const payload = zlib.deflateSync(batchProto.createPacketBuffer("insideBatch",
        packets.map(packet =>
          client.encapsulatedPacketSerializer.createPacketBuffer(packet).slice(1))));
      client.writeMCPE("batch", {
        payload: payload
      });
    };
  });
  return server;
}
module.exports = createServer;

View file

@ -0,0 +1,79 @@
const BinaryStream = require('@jsprismarine/jsbinaryutils').default
const Zlib = require('zlib');
const NETWORK_ID = 0xfe
// This is not a real MCPE packet, it's a wrapper that contains compressed/encrypted batched packets
class BatchPacket {
  /**
   * @param {BinaryStream} [stream] - stream to decode from; a fresh stream
   *   is created when encoding.
   */
  constructor(stream) {
    // Shared
    this.payload = Buffer.alloc(0)
    this.stream = stream || new BinaryStream()
    // Decoding
    this.packets = []
    // Encoding
    this.compressionLevel = 7
    this.count = 0 // number of packets added via addEncodedPacket
  }

  /**
   * Validates the 0xfe batch header and inflates the remaining bytes of
   * this.stream into this.payload.
   * FIX: `if (!pid === NETWORK_ID)` compared a boolean against 0xfe and
   * could never throw; the error message also referenced the undefined
   * `BatchPacket.NETWORK_ID`.
   * @throws {Error} when the leading byte is not the batch id (0xfe)
   */
  decode() {
    const pid = this.stream.readByte()
    if (pid !== NETWORK_ID) {
      throw new Error(`Batch ID mismatch: expected ${NETWORK_ID}, got ${pid}`) // this is not a BatchPacket
    }
    // Decode the payload
    try {
      this.payload = Zlib.inflateRawSync(this.stream.readRemaining(), {
        chunkSize: 1024 * 1024 * 2
      })
    } catch (e) {
      console.error(e)
      console.debug(`[bp] Error decompressing packet ${pid}`)
    }
  }

  /**
   * Deflates the accumulated packet stream and prepends the 0xfe header.
   * (Removed leftover console.log debug output.)
   * @returns {Buffer} wire-ready batch buffer
   */
  encode() {
    const deflated = Zlib.deflateRawSync(this.stream.getBuffer(), { level: this.compressionLevel })
    return Buffer.concat([Buffer.from([NETWORK_ID]), deflated])
  }

  /** Appends an already-serialized packet, length-prefixed, to the batch. */
  addEncodedPacket(packet) {
    this.stream.writeUnsignedVarInt(packet.byteLength)
    this.stream.append(packet)
    this.count++
  }

  /** Splits the inflated payload back into individual packet buffers. */
  getPackets() {
    const stream = new BinaryStream()
    stream.buffer = this.payload
    return BatchPacket.getPackets(stream) // dedup: reuse the static reader
  }

  /** Reads length-prefixed packet buffers from a stream until end of data. */
  static getPackets(stream) {
    const packets = []
    while (!stream.feof()) {
      const length = stream.readUnsignedVarInt()
      packets.push(stream.read(length))
    }
    return packets
  }
}
module.exports = BatchPacket

View file

@ -0,0 +1,166 @@
const UUID = require('uuid-1345')
const minecraft = require('./minecraft')
const { Read, Write, SizeOf } = require('./varlong')
/**
 * UUIDs
 * 16-byte binary UUID <-> canonical string form, as protodef 'native' types.
 */
Read.uuid = ['native', (buffer, offset) => {
  return {
    value: UUID.stringify(buffer.slice(offset, 16 + offset)),
    size: 16
  }
}]
Write.uuid = ['native', (value, buffer, offset) => {
  const buf = UUID.parse(value)
  buf.copy(buffer, offset)
  return offset + 16
}]
SizeOf.uuid = ['native', 16] // fixed width
/**
 * Rest of buffer
 * Reads/writes every remaining byte from the current offset.
 */
Read.restBuffer = ['native', (buffer, offset) => {
  return {
    value: buffer.slice(offset),
    size: buffer.length - offset
  }
}]
Write.restBuffer = ['native', (value, buffer, offset) => {
  value.copy(buffer, offset)
  return offset + value.length
}]
SizeOf.restBuffer = ['native', (value) => {
  return value.length
}]
/**
 * Read NBT until end of buffer or \0
 * NOTE(review): these are 'context' types — `ctx` is not defined in this
 * file, so the protodef compiler presumably injects it when the function
 * body is compiled; confirm against protodef's compiler docs.
 */
Read.nbtLoop = ['context', (buffer, offset) => {
  const values = []
  while (buffer[offset] != 0) {
    // console.log('offs',offset, buffer.length,buffer.slice(offset))
    const n = ctx.nbt(buffer, offset)
    // console.log('read',n)
    values.push(n.value)
    offset += n.size
  }
  // console.log('Ext',offset, buffer.length,buffer.slice(offset))
  // NOTE(review): size here is the bytes *remaining* after the loop, not
  // the bytes consumed — verify this is what the caller expects.
  return { value: values, size: buffer.length - offset }
}]
Write.nbtLoop = ['context', (value, buffer, offset) => {
  for (const val of value) {
    // console.log('val',val,offset)
    offset = ctx.nbt(val, buffer, offset)
  }
  // offset += 1
  // console.log('writing 0', offset)
  buffer.writeUint8(0, offset) // trailing \0 terminator
  return offset + 1
}]
SizeOf.nbtLoop = ['context', (value, buffer, offset) => {
  let size = 1 // terminator byte
  for (const val of value) {
    size += ctx.nbt(val, buffer, offset)
  }
  return size
}]
/**
 * NBT
 * Re-exports the [read, write, sizeOf] codec triplet from ./minecraft.
 */
Read.nbt = ['native', minecraft.nbt[0]]
Write.nbt = ['native', minecraft.nbt[1]]
SizeOf.nbt = ['native', minecraft.nbt[2]]
/**
 * Bits
 * bitflags: wraps an integer type so each named flag in `flags` becomes a
 * boolean on the decoded value; `_value` keeps the raw integer. The bodies
 * below emit *source text* via compiler.wrapCode, so the template strings
 * are code, not data.
 */
// nvm,
// Read.bitflags = ['parametrizable', (compiler, { type, flags }) => {
//   return compiler.wrapCode(`
//   const { value, size } = ${compiler.callType('buffer, offset', type)}
//   const val = {}
//   for (let i = 0; i < size; i++) {
//     const hi = (value >> i) & 1
//     if ()
//     const v = value &
//     if (flags[i])
//   }
//   `
// }]
Read.bitflags = ['parametrizable', (compiler, { type, flags }) => {
  return compiler.wrapCode(`
    const { value: _value, size } = ${compiler.callType(type, 'offset')}
    const value = { _value }
    const flags = ${JSON.stringify(flags)}
    for (const key in flags) {
      value[key] = (_value & flags[key]) == flags[key]
    }
    return { value, size }
  `.trim())
}]
Write.bitflags = ['parametrizable', (compiler, { type, flags }) => {
  return compiler.wrapCode(`
    const flags = ${JSON.stringify(flags)}
    let val = value._value
    for (const key in flags) {
      if (value[key]) val |= flags[key]
    }
    return (ctx.${type})(val, buffer, offset)
  `.trim())
}]
SizeOf.bitflags = ['parametrizable', (compiler, { type, flags }) => {
  return compiler.wrapCode(`
    const flags = ${JSON.stringify(flags)}
    let val = value._value
    for (const key in flags) {
      if (value[key]) val |= flags[key]
    }
    return (ctx.${type})(val)
  `.trim())
}]
/**
 * Command Packet
 * - used for determining the size of the following enum
 * The js()/str() helpers below stringify these function bodies into
 * emitted compiler code, so the free variables (values_len, value,
 * _enum_type, offset) are bound at the emission site, not here.
 */
Read.enum_size_based_on_values_len = ['parametrizable', (compiler) => {
  return compiler.wrapCode(js(() => {
    if (values_len <= 0xff) return { value: 'byte', size: 0 }
    if (values_len <= 0xffff) return { value: 'short', size: 0 }
    if (values_len <= 0xffffff) return { value: 'int', size: 0 }
  }))
}]
Write.enum_size_based_on_values_len = ['parametrizable', (compiler) => {
  return str(() => {
    if (value.values_len <= 0xff) _enum_type = 'byte'
    else if (value.values_len <= 0xffff) _enum_type = 'short'
    else if (value.values_len <= 0xffffff) _enum_type = 'int'
    return offset
  })
}]
SizeOf.enum_size_based_on_values_len = ['parametrizable', (compiler) => {
  return str(() => {
    if (value.values_len <= 0xff) _enum_type = 'byte'
    else if (value.values_len <= 0xffff) _enum_type = 'short'
    else if (value.values_len <= 0xffffff) _enum_type = 'int'
    return 0
  })
}]
/**
 * Extracts a function's body as source text by dropping the first and last
 * lines of its string form (the signature line and the closing brace).
 */
function js(fn) {
  const lines = fn.toString().split('\n')
  return lines.slice(1, -1).join('\n').trim()
}
/**
 * Stringifies a function and appends `)();(()=>{}` so that, once wrapped
 * by the compiler, the emitted code immediately invokes it.
 */
function str(fn) {
  return `${fn.toString()})();(()=>{}`
}
module.exports = { Read, Write, SizeOf }

View file

@ -1,33 +1,35 @@
'use strict';
var nbt = require('prismarine-nbt')
const UUID = require('uuid-1345')
var nbt = require('prismarine-nbt');
const UUID = require('uuid-1345');
const proto = nbt.protos.littleVarint
// TODO: deal with this:
var zigzag = require('prismarine-nbt/compiler-zigzag')
function readUUID(buffer, offset) {
if(offset+16>buffer.length)
if (offset + 16 > buffer.length)
throw new PartialReadError();
return {
value: UUID.stringify(buffer.slice(offset,16+offset)),
value: UUID.stringify(buffer.slice(offset, 16 + offset)),
size: 16
};
}
function writeUUID(value, buffer, offset) {
const buf=UUID.parse(value);
buf.copy(buffer,offset);
const buf = UUID.parse(value);
buf.copy(buffer, offset);
return offset + 16;
}
function readNbt(buffer, offset) {
return nbt.protoLE.read(buffer,offset,"nbt");
return proto.read(buffer, offset, "nbt")
}
function writeNbt(value, buffer, offset) {
return nbt.protoLE.write(value,buffer,offset,"nbt");
return proto.write(value, buffer, offset, "nbt")
}
function sizeOfNbt(value) {
return nbt.protoLE.sizeOf(value,"nbt");
return proto.sizeOf(value, "nbt")
}
function readEntityMetadata(buffer, offset, _ref) {
@ -75,7 +77,7 @@ function sizeOfEntityMetadata(value, _ref3) {
}
function readIpAddress(buffer, offset) {
var address = buffer[offset] + '.' + buffer[offset+1] + '.' + buffer[offset+2] + '.' + buffer[offset+3];
var address = buffer[offset] + '.' + buffer[offset + 1] + '.' + buffer[offset + 2] + '.' + buffer[offset + 3];
return {
size: 4,
value: address
@ -85,7 +87,7 @@ function readIpAddress(buffer, offset) {
function writeIpAddress(value, buffer, offset) {
var address = value.split('.');
address.forEach(function(b) {
address.forEach(function (b) {
buffer[offset] = parseInt(b);
offset++;
});
@ -94,10 +96,10 @@ function writeIpAddress(value, buffer, offset) {
}
function readEndOfArray(buffer, offset, typeArgs) {
var type=typeArgs.type;
var type = typeArgs.type;
var cursor = offset;
var elements = [];
while(cursor<buffer.length) {
while (cursor < buffer.length) {
var results = this.read(buffer, cursor, type, {});
elements.push(results.value);
cursor += results.size;
@ -108,19 +110,19 @@ function readEndOfArray(buffer, offset, typeArgs) {
};
}
function writeEndOfArray(value, buffer, offset,typeArgs) {
var type=typeArgs.type;
function writeEndOfArray(value, buffer, offset, typeArgs) {
var type = typeArgs.type;
var self = this;
value.forEach(function(item) {
value.forEach(function (item) {
offset = self.write(item, buffer, offset, type, {});
});
return offset;
}
function sizeOfEndOfArray(value, typeArgs) {
var type=typeArgs.type;
var type = typeArgs.type;
var size = 0;
for(var i = 0; i < value.length; ++i) {
for (var i = 0; i < value.length; ++i) {
size += this.sizeOf(value[i], type, {});
}
return size;
@ -131,5 +133,7 @@ module.exports = {
'nbt': [readNbt, writeNbt, sizeOfNbt],
'entityMetadataLoop': [readEntityMetadata, writeEntityMetadata, sizeOfEntityMetadata],
'ipAddress': [readIpAddress, writeIpAddress, 4],
'endOfArray':[readEndOfArray,writeEndOfArray,sizeOfEndOfArray]
};
'endOfArray': [readEndOfArray, writeEndOfArray, sizeOfEndOfArray],
'zigzag32': zigzag.zigzag32,
'zigzag64': zigzag.zigzag64
}

12
src/datatypes/promises.js Normal file
View file

@ -0,0 +1,12 @@
/** Resolves after `ms` milliseconds. */
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms))
}

/**
 * Runs `cb` with a resolve callback and races it against a timeout.
 * Resolves with whatever `cb` passes to the callback, or undefined when
 * `withTimeout` ms elapse first.
 * FIX: the previous version called `sleep(...)` as a bare identifier from
 * inside the exported object literal, which threw a ReferenceError —
 * object-literal methods are not in each other's lexical scope.
 */
function waitFor(cb, withTimeout) {
  return Promise.race([
    new Promise(resolve => cb(resolve)),
    sleep(withTimeout)
  ])
}

// Guarded CommonJS export so the file is also loadable under ESM tooling.
if (typeof module !== 'undefined') {
  module.exports = { sleep, waitFor }
}

63
src/datatypes/varlong.js Normal file
View file

@ -0,0 +1,63 @@
/**
 * Returns the number of bytes a value occupies as an unsigned VarInt64.
 * Accepts a bigint, a number, or a [high32, low32] pair.
 */
function sizeOfVarLong(value) {
  if (typeof value.valueOf() === 'object') {
    value = (BigInt(value[0]) << 32n) | BigInt(value[1])
  } else if (typeof value !== 'bigint') {
    value = BigInt(value)
  }
  let bytes = 1
  for (let v = value; v > 127n; v >>= 7n) bytes++
  return bytes
}
/**
 * Reads a 64-bit VarInt starting at `offset` and returns it as a BigInt.
 * @returns {{ value: bigint, size: number }} decoded value and bytes consumed
 * @throws {Error} on truncated input or a varint longer than 10 bytes
 */
function readVarLong(buffer, offset) {
  let value = 0n
  let cursor = offset
  for (let shift = 0n; ; shift += 7n) {
    if (shift > 63n) throw new Error(`varint is too big: ${shift}`)
    if (cursor >= buffer.length) throw new Error('unexpected buffer end')
    const byte = buffer.readUInt8(cursor)
    cursor++
    value |= (BigInt(byte) & 0x7fn) << shift // low 7 bits carry data
    if ((byte & 0x80) === 0) break // MSB clear terminates the varint
  }
  return { value, size: cursor - offset }
}
/**
 * Writes a 64-bit VarInt into `buffer` at `offset`.
 * Accepts a bigint, a number, or a [high32, low32] pair. (Note: despite the
 * old comment, no zigzag transform is applied here — the value is written
 * as-is in unsigned varint form.)
 * @returns {number} offset just past the last byte written
 */
function writeVarLong(value, buffer, offset) {
  // if an array, turn it into a BigInt
  if (typeof value.valueOf() === 'object') {
    value = BigInt.asIntN(64, (BigInt(value[0]) << 32n)) | BigInt(value[1])
  } else if (typeof value !== 'bigint') {
    value = BigInt(value)
  }
  let pos = offset
  while (value > 127n) { // keep writing in 7-bit slices, MSB set = continue
    buffer.writeUInt8(Number(value & 0x7fn) | 0x80, pos++)
    value >>= 7n
  }
  buffer.writeUInt8(Number(value), pos)
  return pos + 1
}
// Exposed as protodef 'native' type triplets keyed by category.
module.exports = {
  Read: { varint64: ['native', readVarLong] },
  Write: { varint64: ['native', writeVarLong] },
  SizeOf: { varint64: ['native', sizeOfVarLong] }
}

View file

@ -1,7 +0,0 @@
module.exports = {
createSerializer: require("./transforms/serializer").createSerializer,
createDeserializer: require("./transforms/serializer").createDeserializer,
createProtocol: require('./transforms/serializer').createProtocol,
createServer: require("./createServer"),
createClient: require("./createClient")
};

12
src/options.js Normal file
View file

@ -0,0 +1,12 @@
// Protocol version constants for supported Bedrock clients.
// Minimum supported version (< will be kicked)
const MIN_VERSION = 422
// Currently supported version
const CURRENT_VERSION = 422
const defaultOptions = {
  // https://minecraft.gamepedia.com/Protocol_version#Bedrock_Edition_2
  version: CURRENT_VERSION
}
module.exports = { defaultOptions, MIN_VERSION, CURRENT_VERSION }

180
src/rak.js Normal file
View file

@ -0,0 +1,180 @@
const { EventEmitter } = require('events')
const Listener = require('jsp-raknet/listener')
const EncapsulatedPacket = require('jsp-raknet/protocol/encapsulated_packet')
const RakClient = require('jsp-raknet/client')
const ConnWorker = require('./rakWorker')
try {
var { Client, Server, PacketPriority, PacketReliability, McPingMessage } = require('raknet-native')
} catch (e) {
console.debug('[raknet] native not found, using js', e)
}
/** RakNet client backed by the native `raknet-native` bindings. */
class RakNativeClient extends EventEmitter {
  /** @param {object} options - expects `hostname` and `port` */
  constructor(options) {
    super()
    // Handlers are assigned by the owning Connection
    this.onConnected = () => {}
    this.onCloseConnection = () => {}
    this.onEncapsulated = () => {}
    this.raknet = new Client(options.hostname, options.port, 'minecraft')
    this.raknet.on('encapsulated', ({ buffer, address }) => {
      this.onEncapsulated(buffer, address)
    })
    this.raknet.on('connected', () => {
      this.onConnected()
    })
  }

  /**
   * Sends an unconnected ping; resolves with the pong's extra payload as a
   * string, or with undefined after a 1 second timeout.
   * FIX: `waitFor` was referenced as a bare global and threw a
   * ReferenceError — it is now imported from ./datatypes/promises.
   */
  async ping() {
    const { waitFor } = require('./datatypes/promises')
    this.raknet.ping()
    return waitFor((done) => {
      this.raknet.on('pong', (ret) => {
        if (ret.extra) {
          done(ret.extra.toString())
        }
      })
    }, 1000)
  }

  connect() {
    this.raknet.connect()
  }

  /** Sends a buffer reliable-ordered; `immediate` raises the priority. */
  sendReliable(buffer, immediate) {
    const priority = immediate ? PacketPriority.IMMEDIATE_PRIORITY : PacketPriority.MEDIUM_PRIORITY
    return this.raknet.send(buffer, priority, PacketReliability.RELIABLE_ORDERED, 0)
  }
}
class RakNativeServer extends EventEmitter {
constructor(options = {}) {
super()
console.log('opts',options)
this.onOpenConnection = () => {}
this.onCloseConnection = () => {}
this.onEncapsulated = () => {}
this.raknet = new Server(options.hostname, options.port, {
maxConnections: options.maxConnections || 3,
minecraft: { },
message: new McPingMessage().toBuffer()
})
this.raknet.on('openConnection', (client) => {
client.sendReliable = function(buffer, immediate) {
const priority = immediate ? PacketPriority.IMMEDIATE_PRIORITY : PacketPriority.MEDIUM_PRIORITY
return this.send(buffer, priority, PacketReliability.RELIABLE_ORDERED, 0)
}
this.onOpenConnection(client)
})
this.raknet.on('closeConnection', (client) => {
console.log('!!! Client CLOSED CONNECTION!')
this.onCloseConnection(client)
})
this.raknet.on('encapsulated', (thingy) => {
const { buffer, address, guid }=thingy
// console.log('ENCAP',thingy)
this.onEncapsulated(buffer, address)
})
}
listen() {
this.raknet.listen()
}
}
/**
 * Pure-JS RakNet client (jsp-raknet), optionally running in a worker thread.
 * NOTE(review): `this.options` is never assigned in the constructor, yet the
 * connect methods default to `this.options.hostname`/`.port` — calling them
 * without arguments would throw; confirm callers always pass host/port.
 */
class RakJsClient extends EventEmitter {
  constructor(options = {}) {
    super()
    this.onConnected = () => {}
    this.onEncapsulated = () => {}
    // Pick worker-thread or in-process transport once, up front
    if (options.useWorkers) {
      this.connect = this.workerConnect
      this.sendReliable = this.workerSendReliable
    } else {
      this.connect = this.plainConnect
      this.sendReliable = this.plainSendReliable
    }
  }
  workerConnect(hostname = this.options.hostname, port = this.options.port) {
    this.worker = ConnWorker.connect(hostname, port)
    this.worker.on('message', (evt) => {
      switch (evt.type) {
        case 'connected':
          this.onConnected()
          break
        case 'encapsulated':
          const [ecapsulated, address] = evt.args
          this.onEncapsulated(ecapsulated.buffer, address.hash)
          break
      }
    })
  }
  async plainConnect(hostname = this.options.hostname, port = this.options.port) {
    this.raknet = new RakClient(hostname, port)
    await this.raknet.connect()
    this.raknet.on('connecting', () => {
      console.log(`[client] connecting to ${hostname}/${port}`)
    })
    this.raknet.on('connected', this.onConnected)
    this.raknet.on('encapsulated', (encapsulated, addr) => this.onEncapsulated(encapsulated.buffer, addr.hash))
  }
  workerSendReliable(buffer, immediate) {
    this.worker.postMessage({ type: 'queueEncapsulated', packet: buffer, immediate })
  }
  // NOTE(review): `Reliability` is not imported in this file and
  // `this.connection` is never assigned (plainConnect stores `this.raknet`)
  // — this path looks broken; confirm and fix wiring.
  plainSendReliable(buffer, immediate) {
    const sendPacket = new EncapsulatedPacket()
    sendPacket.reliability = Reliability.ReliableOrdered
    sendPacket.buffer = buffer
    this.connection.addEncapsulatedToQueue(sendPacket)
    if (immediate) this.connection.sendQueue()
  }
}
/** Pure-JS RakNet server (jsp-raknet). Worker mode is not yet implemented. */
class RakJsServer extends EventEmitter {
  constructor(options = {}) {
    super()
    this.options = options
    this.onOpenConnection = () => {}
    this.onCloseConnection = () => {}
    this.onEncapsulated = () => {}
    if (options.useWorkers) {
      throw Error('nyi')
    } else {
      this.listen = this.plainListen
    }
  }
  async plainListen() {
    this.raknet = new Listener()
    await this.raknet.listen(this.options.hostname, this.options.port)
    this.raknet.on('openConnection', (conn) => {
      // NOTE(review): inside this function `this` is `conn`; `Reliability`
      // is not imported here, `conn.connection` is presumably undefined and
      // `this.raknet` refers to conn.raknet, not the server's — this path
      // looks broken; confirm the intended send wiring.
      conn.sendReliable = function(buffer, immediate) {
        const sendPacket = new EncapsulatedPacket()
        sendPacket.reliability = Reliability.ReliableOrdered
        sendPacket.buffer = buffer
        this.connection.addEncapsulatedToQueue(sendPacket)
        if (immediate) this.raknet.sendQueue()
      }
      this.onOpenConnection(conn)
    })
    this.raknet.on('closeConnection', this.onCloseConnection)
    this.raknet.on('encapsulated', this.onEncapsulated)
  }
}
module.exports = {
RakClient: Client ? RakNativeClient : RakJsClient,
RakServer: Server ? RakNativeServer : RakJsServer
}

64
src/rakWorker.js Normal file
View file

@ -0,0 +1,64 @@
const RakClient = require('jsp-raknet/client')
const { Worker, isMainThread, parentPort } = require('worker_threads')
const EncapsulatedPacket = require('jsp-raknet/protocol/encapsulated_packet')
const Reliability = require('jsp-raknet/protocol/reliability')
/**
 * Spawns a worker thread running this module and asks it to open a RakNet
 * connection. Returns the Worker handle; no-op (undefined) off the main
 * thread.
 */
function connect(hostname, port) {
  if (!isMainThread) return
  const worker = new Worker(__filename)
  worker.postMessage({ type: 'connect', hostname, port })
  return worker
}
var raknet
function main() {
parentPort.on('message', (evt) => {
if (evt.type == 'connect') {
const { hostname, port } =evt
raknet = new RakClient(hostname, port)
raknet.connect().then(() => {
console.log('Raknet Connected!')
})
raknet.on('connecting', () => {
console.log(`[client] connecting to ${hostname}/${port}`)
parentPort.postMessage('message', { type: 'connecting' })
console.log('Raknet', raknet)
})
raknet.once('connected', (connection) => {
console.log(`[worker] connected!`)
globalThis.raknetConnection = connection
parentPort.postMessage({ type: 'connected' })
})
raknet.on('encapsulated', (...args) => {
// console.log('-> ENCAP BUF', args)
setTimeout(() => {
parentPort.postMessage({ type: 'encapsulated', args })
}, 100)
})
raknet.on('raw', (buffer, inetAddr) => {
console.log('Raw packet', buffer, inetAddr)
})
} else if (evt.type == 'queueEncapsulated') {
console.log('SEND' , globalThis.raknetConnection, evt.packet)
const sendPacket = new EncapsulatedPacket()
sendPacket.reliability = Reliability.ReliableOrdered
sendPacket.buffer = evt.packet
globalThis.raknetConnection?.addEncapsulatedToQueue(sendPacket)
if (evt.immediate) {
globalThis.raknetConnection?.sendQueue()
}
}
})
}
if (!isMainThread) main()
module.exports = { connect }

230
src/relay.js Normal file
View file

@ -0,0 +1,230 @@
// process.env.DEBUG = 'minecraft-protocol raknet'
const fs = require('fs')
const { Client } = require("./client")
const { Server } = require("./server")
const { Player } = require("./serverPlayer")
const debug = require('debug')('minecraft-protocol relay')
/** @typedef {{ hostname: string, port: number, auth: 'client' | 'server' | null, destination?: { hostname: string, port: number } }} Options */
const debugging = true // Do re-encoding tests
/**
 * Server-side player that shuttles traffic between a downstream client and
 * an upstream backend Client (wired up by Relay.openUpstreamConnection).
 * Packets arriving before either side is ready are parked in downQ/upQ.
 */
class RelayPlayer extends Player {
  constructor(server, conn) {
    super(server, conn)
    this.server = server
    this.conn = conn
    this.startRelaying = false
    this.once('join', () => { // The client has joined our proxy
      this.flushDownQueue() // Send queued packets from the upstream backend
      this.startRelaying = true
    })
    this.downQ = [] // backend packets held until the downstream client joins
    this.upQ = [] // client packets held until the upstream connects
    this.upInLog = (...msg) => console.info('** Backend -> Proxy', ...msg)
    this.upOutLog = (...msg) => console.info('** Proxy -> Backend', ...msg)
    this.downInLog = (...msg) => console.info('** Client -> Proxy', ...msg)
    this.downOutLog = (...msg) => console.info('** Proxy -> Client', ...msg)
    if (!server.options.logging) {
      this.upInLog = () => { }
      this.upOutLog = () => { }
      this.downInLog = () => { }
      this.downOutLog = () => { }
    }
    // this.upOutLog = (...msg) => {
    //   if (msg.includes('player_auth_input')) {
    //     // stream.write(msg)
    //     console.info('INPUT', msg)
    //   }
    // }
    this.outLog = this.downOutLog
    this.inLog = this.downInLog
  }
  // Called when we get a packet from backend server (Backend -> PROXY -> Client)
  readUpstream(packet) {
    if (!this.startRelaying) {
      console.warn('The downstream client is not ready yet !!')
      this.downQ.push(packet)
      return
    }
    this.upInLog('Recv packet', packet)
    const des = this.server.deserializer.parsePacketBuffer(packet)
    const name = des.data.name
    const params = des.data.params
    this.upInLog('~ Bounce B->C', name, serialize(params).slice(0, 100))
    // this.upInLog('~ ', des.buffer)
    if (name == 'play_status' && params.status == 'login_success') return // We already sent this, this needs to be sent ASAP or client will disconnect
    if (debugging) { // some packet encode/decode testing stuff
      // Round-trip the packet through the serializer; abort on any mismatch
      const rpacket = this.server.serializer.createPacketBuffer({ name, params })
      if (rpacket.toString('hex') !== packet.toString('hex')) {
        console.warn('New', rpacket.toString('hex'))
        console.warn('Old', packet.toString('hex'))
        console.log('Failed to re-encode', name, params)
        process.exit(1)
        throw Error('re-encoding fail for' + name + ' - ' + JSON.stringify(params))
      }
    }
    this.queue(name, params)
    // this.sendBuffer(packet)
  }
  // Send queued packets to the connected client
  flushDownQueue() {
    for (const packet of this.downQ) {
      const des = this.server.deserializer.parsePacketBuffer(packet)
      this.write(des.data.name, des.data.params)
    }
    this.downQ = []
  }
  // Send queued packets to the backend upstream server from the client
  flushUpQueue() {
    for (var e of this.upQ) { // Send the queue
      const des = this.server.deserializer.parsePacketBuffer(e)
      if (des.data.name == 'client_cache_status') { // Currently broken, force off the chunk cache
        this.upstream.write('client_cache_status', { enabled: false })
      } else {
        this.upstream.write(des.data.name, des.data.params)
      }
    }
    this.upQ = []
  }
  // Called when the server gets a packet from the downstream player (Client -> PROXY -> Backend)
  readPacket(packet) {
    if (this.startRelaying) { // The downstream client conn is established & we got a packet to send to upstream server
      if (!this.upstream) { // Upstream is still connecting/handshaking
        this.downInLog('Got downstream connected packet but upstream is not connected yet, added to q', this.upQ.length)
        this.upQ.push(packet) // Put into a queue
        return
      }
      this.flushUpQueue() // Send queued packets
      this.downInLog('Recv packet', packet)
      const des = this.server.deserializer.parsePacketBuffer(packet)
      if (debugging) { // some packet encode/decode testing stuff
        const rpacket = this.server.serializer.createPacketBuffer(des.data)
        if (rpacket.toString('hex') !== packet.toString('hex')) {
          console.warn('New', rpacket.toString('hex'))
          console.warn('Old', packet.toString('hex'))
          console.log('Failed to re-encode', des.data)
          process.exit(1)
        }
      }
      switch (des.data.name) {
        case 'client_cache_status':
          this.upstream.queue('client_cache_status', { enabled: false })
          break
        default:
          // Emit the packet as-is back to the upstream server
          // this.upstream.queue(des.data.name, des.data.params)
          this.downInLog('Relaying', des.data)
          this.upstream.sendBuffer(packet)
      }
    } else {
      super.readPacket(packet)
    }
  }
}
class Relay extends Server {
  /**
   * Creates a new non-transparent proxy connection to a destination server
   * @param {Options} options
   */
  constructor(options) {
    super(options)
    this.RelayPlayer = options.relayPlayer || RelayPlayer
    this.forceSingle = true // only one downstream client at a time
    this.upstreams = new Map() // clientAddr.hash -> upstream Client
  }
  // Dials the backend server on behalf of a newly connected downstream client
  openUpstreamConnection(ds, clientAddr) {
    const client = new Client({
      hostname: this.options.destination.hostname,
      port: this.options.destination.port,
      encrypt: this.options.encrypt
    })
    client.outLog = ds.upOutLog
    client.inLog = ds.upInLog
    // console.log('Set upstream logs', client.outLog, client.inLog)
    client.once('join', () => { // Intercept once handshaking done
      ds.upstream = client
      ds.flushUpQueue()
      console.log('UPSTREAM HAS JOINED')
      // From here on, backend packets flow through the relay player
      client.readPacket = (packet) => ds.readUpstream(packet)
    })
    this.upstreams.set(clientAddr.hash, client)
  }
  // Tears down the backend connection for a departing client
  closeUpstreamConnection(clientAddr) {
    const up = this.upstreams.get(clientAddr.hash)
    if (!up) throw Error(`unable to close non-existant connection ${clientAddr.hash}`)
    up.close()
    this.upstreams.delete(clientAddr.hash)
    debug('relay closed connection', clientAddr)
  }
  // RakNet open-connection hook; enforces single-client mode
  onOpenConnection = (conn) => {
    debug('new connection', conn)
    if (this.forceSingle && this.clientCount > 0) {
      debug('dropping connection as single client relay', conn)
      conn.close()
    } else {
      const player = new this.RelayPlayer(this, conn)
      console.debug('New connection from', conn.address)
      this.clients[conn.address] = player
      this.emit('connect', { client: player })
      this.openUpstreamConnection(player, conn.address)
    }
  }
}
/** JSON.stringify that survives BigInt values by emitting them as strings. */
function serialize(obj = {}, fmt) {
  return JSON.stringify(obj, (key, val) => (typeof val === 'bigint' ? val.toString() : val), fmt)
}
/** Demo entry point: starts a relay on 19130 forwarding to 127.0.0.1:19132. */
function createRelay() {
  console.log('Creating relay')
  /**
   * Example to create a non-transparent proxy (or 'Relay') connection to destination server
   * In Relay we de-code and re-encode packets
   */
  const relay = new Relay({
    /* Hostname and port for clients to listen to */
    hostname: '0.0.0.0',
    port: 19130,
    /**
     * Who does the authentication
     * If set to `client`, all connecting clients will be sent a message with a link to authenticate
     * If set to `server`, the server will authenticate and only one client will be able to join
     * (Default) If set to `none`, no authentication will be done
     */
    auth: 'server',
    /**
     * Sets if packets will automatically be forwarded. If set to false, you must listen for on('packet')
     * events and forward packets yourself.
     */
    auto: true,
    /* Where to send upstream packets to */
    destination: {
      hostname: '127.0.0.1',
      port: 19132,
      // encryption: true
    }
  })
  relay.create()
}
createRelay()

62
src/server.js Normal file
View file

@ -0,0 +1,62 @@
const { EventEmitter } = require('events')
const { createDeserializer, createSerializer } = require('./transforms/serializer')
const { Player } = require('./serverPlayer')
const { RakServer } = require('./rak')
const Options = require('./options')
const debug = require('debug')('minecraft-protocol')
/**
 * Minecraft Bedrock protocol server: accepts RakNet connections and wraps
 * each one in a Player that handles login/encryption/packets.
 */
class Server extends EventEmitter {
  constructor(options) {
    super()
    this.options = { ...Options.defaultOptions, ...options }
    // Serializer/deserializer are shared by all connected players
    this.serializer = createSerializer()
    this.deserializer = createDeserializer()
    this.clients = {} // keyed by connection address
    this.clientCount = 0
    this.validateOptions()
    this.inLog = (...args) => console.debug('S', ...args)
    this.outLog = (...args) => console.debug('S', ...args)
  }
  /** @throws {Error} when configured below the minimum supported protocol */
  validateOptions() {
    if (this.options.version < Options.MIN_VERSION) {
      throw new Error(`Unsupported protocol version < ${Options.MIN_VERSION} : ${this.options.version}`)
    }
  }
  onOpenConnection = (conn) => {
    this.inLog('new connection', conn)
    const player = new Player(this, conn)
    this.clients[conn.address] = player
    this.clientCount++
    this.emit('connect', { client: player })
  }
  // NOTE(review): clients are stored under conn.address but removed under
  // inetAddr — confirm both carry the same string/hash form.
  onCloseConnection = (inetAddr, reason) => {
    debug('close connection', inetAddr, reason)
    delete this.clients[inetAddr]
    this.clientCount--
  }
  onEncapsulated = (buffer, address) => {
    debug(address, 'Encapsulated', buffer)
    const client = this.clients[address]
    if (!client) {
      throw new Error(`packet from unknown inet addr: ${address}`)
    }
    client.handle(buffer)
  }
  /** Binds the RakNet listener and wires its callbacks to this server. */
  async create(hostname = this.options.hostname, port = this.options.port) {
    this.raknet = new RakServer({ hostname, port })
    await this.raknet.listen()
    console.debug('Listening on', hostname, port)
    this.raknet.onOpenConnection = this.onOpenConnection
    this.raknet.onCloseConnection = this.onCloseConnection
    this.raknet.onEncapsulated = this.onEncapsulated
  }
}
// Build a stable string key ("address/port") for a remote endpoint.
// NOTE(review): currently unused in this file — presumably intended for
// keying `clients`; confirm before removing.
const hash = (inetAddr) => `${inetAddr.address}/${inetAddr.port}`

module.exports = { Server }

132
src/serverPlayer.js Normal file
View file

@ -0,0 +1,132 @@
const { Encrypt } = require('./auth/encryption')
const { decodeLoginJWT } = require('./auth/chains')
const { Connection } = require('./connection')
const fs = require('fs')
const debug = require('debug')('minecraft-protocol')
// Lifecycle states for a connecting client:
// Authenticating -> (login verified, encryption started) -> Initializing
// -> (set_local_player_as_initialized received) -> Initialized.
// Frozen so the shared constant cannot be mutated by callers.
const ClientStatus = Object.freeze({
  Authenticating: 0,
  Initializing: 1,
  Initialized: 2
})
/**
 * Server-side representation of one connected client.
 * Handles the login JWT verification, the encryption handshake and the
 * initial packet dispatch. Emits: 'loggingIn', 'login', 'join', 'spawn',
 * plus one event per received packet name.
 */
class Player extends Connection {
  constructor (server, connection) {
    super()
    this.server = server
    this.serializer = server.serializer
    this.deserializer = server.deserializer
    this.connection = connection
    this.options = server.options
    Encrypt(this, server, this.options)
    this.startQueue()
    this.status = ClientStatus.Authenticating
    this.inLog = (...args) => console.info('S ->', ...args)
    this.outLog = (...args) => console.info('S <-', ...args)
  }

  // Returns the authenticated user's extraData (set in onLogin).
  getData () {
    return this.userData
  }

  /**
   * Handles the client's `login` packet: version gate, JWT chain
   * verification, then kicks off the server->client encryption handshake.
   */
  onLogin (packet) {
    const body = packet.data
    debug('Body', body)
    this.emit('loggingIn', body)

    const clientVer = body.protocol_version
    if (this.server.options.version) {
      if (this.server.options.version < clientVer) {
        // Fix: `failed_client` was an undefined identifier (ReferenceError).
        // NOTE(review): 'failed_server' may be the more accurate status when
        // the server is the outdated side — confirm against the protocol docs.
        this.sendDisconnectStatus('failed_client')
        return
      }
    } else {
      // Fix: bare `MIN_VERSION` was an undefined identifier here.
      const { MIN_VERSION } = require('./options')
      if (clientVer < MIN_VERSION) {
        this.sendDisconnectStatus('failed_client')
        return
      }
    }

    // Parse login data: first JWT chain authenticates the user, the second
    // carries skin/client data.
    const authChain = JSON.parse(body.params.chain)
    const skinChain = body.params.client_data
    let key, userData
    try {
      ({ key, userData } = decodeLoginJWT(authChain.chain, skinChain))
    } catch (e) {
      console.error(e)
      throw new Error('Failed to verify user')
    }
    console.log('Verified user', 'got pub key', key, userData)
    this.emit('login', { user: userData.extraData }) // emit events for user
    this.emit('server.client_handshake', { key }) // internal so we start encryption
    this.userData = userData.extraData
    this.version = clientVer
  }

  /**
   * Disconnects a client before it has joined
   * @param {string} play_status status code sent in the play_status packet
   */
  sendDisconnectStatus (play_status) {
    this.write('play_status', { status: play_status })
    this.connection.close()
  }

  /**
   * Disconnects a client after it has joined
   * @param {string} reason message shown to the client
   * @param {boolean} hide whether to hide the disconnect screen
   */
  disconnect (reason, hide = false) {
    this.write('disconnect', {
      hide_disconnect_screen: hide,
      message: reason
    })
    this.connection.close()
  }

  // After sending Server to Client Handshake, this handles the client's
  // Client to Server handshake response. This indicates successful encryption
  onHandshake () {
    // https://wiki.vg/Bedrock_Protocol#Play_Status
    this.write('play_status', { status: 'login_success' })
    this.status = ClientStatus.Initializing
    this.emit('join')
  }

  // Deserializes one raw packet and dispatches it by name.
  readPacket (packet) {
    let des
    try {
      des = this.server.deserializer.parsePacketBuffer(packet)
    } catch (e) {
      // Dump the offending bytes for offline analysis before bailing out.
      this.disconnect('Server error')
      console.warn('Packet parsing failed! Writing dump to ./packetdump.bin')
      fs.writeFileSync('packetdump.bin', packet)
      fs.writeFileSync('packetdump.txt', packet.toString('hex'))
      throw e
    }
    console.log('-> S', des)
    switch (des.data.name) {
      case 'login':
        this.onLogin(des)
        return
      case 'client_to_server_handshake':
        // Encryption handshake complete; emits the 'join' event.
        this.onHandshake()
        break // fix: missing break fell through into the cases below
      case 'set_local_player_as_initialized':
        // Fix: original assigned `this.state`; the class tracks `this.status`.
        this.status = ClientStatus.Initialized
        // Emit the 'spawn' event
        this.emit('spawn')
        break // fix: missing break fell through into the default log
      default:
        console.log('ignoring, unhandled', des.data.name)
    }
    this.emit(des.data.name, des.data.params)
  }
}

Binary file not shown.

View file

@ -0,0 +1,164 @@
const { PassThrough, Transform } = require('readable-stream')
const crypto = require('crypto')
const aesjs = require('aes-js')
const Zlib = require('zlib')
// MCPE uses AES-256 in CFB mode with an 8-bit feedback segment.
const CIPHER = 'aes-256-cfb8'

/**
 * Create an AES-256-CFB8 encryption stream. Prefers the native OpenSSL
 * implementation; falls back to the pure-JS aes-js based transform when the
 * cipher is not available in this build.
 */
function createCipher (secret, initialValue) {
  const hasNative = crypto.getCiphers().includes(CIPHER)
  return hasNative
    ? crypto.createCipheriv(CIPHER, secret, initialValue)
    : new Cipher(secret, initialValue)
}

/** Create the matching AES-256-CFB8 decryption stream (see createCipher). */
function createDecipher (secret, initialValue) {
  const hasNative = crypto.getCiphers().includes(CIPHER)
  return hasNative
    ? crypto.createDecipheriv(CIPHER, secret, initialValue)
    : new Decipher(secret, initialValue)
}
/**
 * Pure-JS AES-CFB8 encryption Transform, used only when the native
 * 'aes-256-cfb8' cipher is unavailable in this Node build.
 */
class Cipher extends Transform {
  constructor (secret, iv) {
    super()
    // Segment size 1 byte => CFB8.
    this.aes = new aesjs.ModeOfOperation.cfb(secret, iv, 1) // eslint-disable-line new-cap
  }

  _transform (data, encoding, callback) {
    let encrypted
    try {
      encrypted = this.aes.encrypt(data)
    } catch (err) {
      return callback(err)
    }
    callback(null, encrypted)
  }
}
/**
 * Pure-JS AES-CFB8 decryption Transform, counterpart of Cipher above;
 * used only when the native 'aes-256-cfb8' cipher is unavailable.
 */
class Decipher extends Transform {
  constructor (secret, iv) {
    super()
    // Segment size 1 byte => CFB8.
    this.aes = new aesjs.ModeOfOperation.cfb(secret, iv, 1) // eslint-disable-line new-cap
  }

  _transform (data, encoding, callback) {
    let decrypted
    try {
      decrypted = this.aes.decrypt(data)
    } catch (err) {
      return callback(err)
    }
    callback(null, decrypted)
  }
}
/**
 * Compute the 8-byte packet checksum:
 * SHA256(send_counter_le64 + plaintext + secret_key)[0:8].
 * @param {Buffer} packetPlaintext compressed packet body
 * @param {bigint} sendCounter per-direction packet counter
 * @param {Buffer} secretKeyBytes shared AES secret
 * @returns {Buffer} first 8 bytes of the digest
 */
function computeCheckSum (packetPlaintext, sendCounter, secretKeyBytes) {
  const counter = Buffer.alloc(8)
  counter.writeBigInt64LE(sendCounter, 0)
  return crypto.createHash('sha256')
    .update(counter)
    .update(packetPlaintext)
    .update(secretKeyBytes)
    .digest()
    .slice(0, 8)
}
/**
 * Build the outbound encryption pipeline for `client` and return a function
 * that takes one plaintext packet blob, deflates it, appends the checksum
 * and feeds it to the AES stream. Encrypted output is delivered via
 * client.onEncryptedPacket.
 */
function createEncryptor (client, iv) {
  client.cipher = createCipher(client.secretKeyBytes, iv)
  client.sendCounter = client.sendCounter || 0n

  // A packet is encrypted via AES256(plaintext + SHA256(send_counter + plaintext + secret_key)[0:8]).
  // The send counter is represented as a little-endian 64-bit long and incremented after each packet.
  const encryptAndSend = (blob) => {
    const compressed = Zlib.deflateRawSync(blob, { level: 7 })
    const checksum = computeCheckSum(compressed, client.sendCounter, client.secretKeyBytes)
    client.sendCounter++
    client.cipher.write(Buffer.concat([compressed, checksum]))
  }

  client.cipher.on('data', client.onEncryptedPacket)
  return encryptAndSend
}
// Build the inbound decryption pipeline for `client` and return a function
// that accepts ciphertext blobs. Each AES-decrypted chunk is raw-inflated,
// its trailing 8-byte checksum verified against
// SHA256(receive_counter + plaintext + secret)[0:8], and the inflated
// plaintext passed to client.onDecryptedPacket. Throws on checksum mismatch
// or missing checksum.
function createDecryptor(client, iv) {
  client.decipher = createDecipher(client.secretKeyBytes, iv)
  // BigInt counter, incremented once per received packet.
  client.receiveCounter = client.receiveCounter || 0n

  function verify(chunk) {
    // First try to zlib decompress, then see how much bytes get read
    const { buffer, engine } = Zlib.inflateRawSync(chunk, {
      chunkSize: 1024 * 1024 * 2,
      info: true
    })
    // Holds how much bytes we read, also where the checksum (should) start
    const inflatedLen = engine.bytesRead
    // It appears that mc sends extra bytes past the checksum. I don't think this is a raknet
    // issue (as we are able to decipher properly, zlib works and should also have a checksum) so
    // there needs to be more investigation done. If you know what's wrong here, please make an issue :)
    const extraneousLen = chunk.length - inflatedLen - 8
    if (extraneousLen > 0) { // Extra bytes
      // Info for debugging, todo: use debug()
      const extraneousBytes = chunk.slice(inflatedLen + 8)
      console.debug('Extraneous bytes!', extraneousLen, extraneousBytes.toString('hex'))
    } else if (extraneousLen < 0) {
      // No checksum or decompression failed
      console.warn('Failed to decrypt', chunk.toString('hex'))
      throw new Error('Decrypted packet is missing checksum')
    }
    // Split the decrypted chunk into compressed payload + 8-byte checksum.
    const packet = chunk.slice(0, inflatedLen);
    const checksum = chunk.slice(inflatedLen, inflatedLen + 8);
    const computedCheckSum = computeCheckSum(packet, client.receiveCounter, client.secretKeyBytes)
    client.receiveCounter++
    if (checksum.toString("hex") == computedCheckSum.toString("hex")) {
      client.onDecryptedPacket(buffer)
    } else {
      console.log('Inflated', inflatedLen, chunk.length, extraneousLen, chunk.toString('hex'))
      throw Error(`Checksum mismatch ${checksum.toString("hex")} != ${computedCheckSum.toString("hex")}`)
    }
  }

  client.decipher.on('data', verify)
  return (blob) => {
    // Feed ciphertext into the decipher stream; `verify` runs per decrypted chunk.
    client.decipher.write(blob)
  }
}
module.exports = {
  createCipher, createDecipher, createEncryptor, createDecryptor
}

// Manual smoke test against known ciphertext fixtures (same vectors as
// test/decryption.js). Not run automatically — uncomment the call below.
function testDecrypt() {
  const client = {
    secretKeyBytes: Buffer.from('ZOBpyzki/M8UZv5tiBih048eYOBVPkQE3r5Fl0gmUP4=', 'base64'),
    onDecryptedPacket: (...data) => console.log('Decrypted', data)
  }
  const iv = Buffer.from('ZOBpyzki/M8UZv5tiBih0w==', 'base64')
  const decrypt = createDecryptor(client, iv)
  console.log('Dec', decrypt(Buffer.from('4B4FCA0C2A4114155D67F8092154AAA5EF', 'hex')))
  console.log('Dec 2', decrypt(Buffer.from('DF53B9764DB48252FA1AE3AEE4', 'hex')))
}
// testDecrypt()

View file

@ -1,29 +1,52 @@
var ProtoDef = require('protodef').ProtoDef;
var Serializer = require('protodef').Serializer;
var Parser = require('protodef').Parser;
var protocol = require('minecraft-data')('pe_0.14').protocol;
const { ProtoDefCompiler, CompiledProtodef } = require('protodef').Compiler
const { FullPacketParser, Serializer } = require('protodef')
function createProtocol() {
var proto = new ProtoDef();
proto.addTypes(require('../datatypes/minecraft'));
proto.addTypes(protocol);
const protocol = require('../../data/newproto.json').types
console.log('Proto', protocol)
var compiler = new ProtoDefCompiler()
compiler.addTypesToCompile(protocol)
compiler.addTypes(require('../datatypes/compiler-minecraft'))
compiler.addTypes(require('prismarine-nbt/compiler-zigzag'))
return proto;
const compiledProto = compiler.compileProtoDefSync()
return compiledProto
}
function getProtocol() {
const compiler = new ProtoDefCompiler()
compiler.addTypes(require('../datatypes/compiler-minecraft'))
compiler.addTypes(require('prismarine-nbt/compiler-zigzag'))
const compile = (compiler, file) => {
global.native = compiler.native // eslint-disable-line
const { PartialReadError } = require('protodef/src/utils') // eslint-disable-line
return require(file)() // eslint-disable-line
}
return new CompiledProtodef(
compile(compiler.sizeOfCompiler, '../../data/size.js'),
compile(compiler.writeCompiler, '../../data/write.js'),
compile(compiler.readCompiler, '../../data/read.js')
// compiler.sizeOfCompiler.compile(fs.readFileSync(__dirname + '/../../data/size.js', 'utf-8')),
// compiler.writeCompiler.compile(fs.readFileSync(__dirname + '/../../data/write.js', 'utf-8')),
// compiler.readCompiler.compile(fs.readFileSync(__dirname + '/../../data/read.js', 'utf-8'))
)
}
function createSerializer() {
var proto = createProtocol();
return new Serializer(proto, 'packet');
var proto = getProtocol()
return new Serializer(proto, 'mcpe_packet');
}
function createDeserializer() {
var proto = createProtocol();
return new Parser(proto, 'packet');
var proto = getProtocol()
return new FullPacketParser(proto, 'mcpe_packet');
}
module.exports = {
createDeserializer: createDeserializer,
createSerializer: createSerializer,
createProtocol: createProtocol
};
}

33
test/checksum.js Normal file
View file

@ -0,0 +1,33 @@
const crypto=require("crypto");
const assert=require("assert");
const bufferEqual=require("buffer-equal");
// Write a 64-bit value expressed as [high32, low32] into `buffer` at
// `offset` in little-endian byte order. Returns the offset just past the
// written 8 bytes.
function writeLI64(value, buffer, offset) {
  const [high, low] = value;
  buffer.writeInt32LE(low, offset);
  buffer.writeInt32LE(high, offset + 4);
  return offset + 8;
}
// based on https://s.yawk.at/QADm and https://confluence.yawk.at/display/PEPROTOCOL/Encryption
describe("checksum", () => {
  it("generate hash and checksum", () => {
    // checksum = SHA256(send_counter_le64 + plaintext + secret_key)[0:8]
    // Fix: replaced deprecated/unsafe `new Buffer(...)` with Buffer.from/alloc.
    const packetPlaintext = Buffer.from("3C00000008", "hex");
    const sendCounter = [0, 1]; // [high32, low32] => counter value 1
    const secretKeyBytes = Buffer.from("ZOBpyzki/M8UZv5tiBih048eYOBVPkQE3r5Fl0gmUP4=", "base64");

    const digest = crypto.createHash('sha256');
    // sendCounter to little-endian byte array
    const counter = Buffer.alloc(8);
    writeLI64(sendCounter, counter, 0);
    digest.update(counter);
    digest.update(packetPlaintext);
    digest.update(secretKeyBytes);
    const hash = digest.digest();
    assert(bufferEqual(hash, Buffer.from("WkRtEcDHqlqesU6wdSnIz7cU3OCNKVMIsX3aXZMLRjQ=", "base64")), hash.toString("base64"));
    const checksum = hash.slice(0, 8);
    assert(bufferEqual(checksum, Buffer.from("5A446D11C0C7AA5A", "hex")));
  });
});

41
test/decryption.js Normal file
View file

@ -0,0 +1,41 @@
const crypto=require("crypto");
const assert=require("assert");
const bufferEqual=require("buffer-equal");
// based on https://s.yawk.at/8W5U and https://confluence.yawk.at/display/PEPROTOCOL/Encryption
describe("decryption", () => {
  let decipher;
  before(() => {
    // Fix: replaced deprecated/unsafe `new Buffer(...)` with Buffer.from.
    // MCPE derives the IV from the first 16 bytes of the shared secret.
    const secretKeyBytes = Buffer.from("ZOBpyzki/M8UZv5tiBih048eYOBVPkQE3r5Fl0gmUP4=", "base64");
    const iv = secretKeyBytes.slice(0, 16);
    assert(bufferEqual(iv, Buffer.from("ZOBpyzki/M8UZv5tiBih0w==", "base64")));
    decipher = crypto.createDecipheriv('aes-256-cfb8', secretKeyBytes, iv);
  });
  it("decrypt 1", cb => {
    const packet1Encrypted = Buffer.from("4B4FCA0C2A4114155D67F8092154AAA5EF", "hex");
    decipher.once('data', packet1Decrypted => {
      assert(bufferEqual(packet1Decrypted, Buffer.from("0400000000499602D2FC2FCB233F34D5DD", "hex")));
      cb();
    });
    decipher.write(packet1Encrypted);
  });
  it("decrypt 2", cb => {
    // Depends on the stream state left by "decrypt 1" (CFB feedback).
    const packet2Encrypted = Buffer.from("DF53B9764DB48252FA1AE3AEE4", "hex");
    decipher.once('data', packet2Decrypted => {
      assert(bufferEqual(packet2Decrypted, Buffer.from("3C000000085A446D11C0C7AA5A", "hex")));
      cb();
    });
    decipher.write(packet2Encrypted);
  });
});

43
test/ecdh_key_exchange.js Normal file
View file

@ -0,0 +1,43 @@
const crypto=require("crypto");
var Ber = require('asn1').Ber;
const assert=require("assert");
const bufferEqual=require("buffer-equal");
// based on https://s.yawk.at/VZSf and https://confluence.yawk.at/display/PEPROTOCOL/Encryption
// and https://github.com/mhsjlw/pocket-minecraft-protocol/issues/15
describe("ecdh key exchange", () => {
  it("generate the secret", () => {
    // Fix: replaced deprecated/unsafe `new Buffer(...)` with Buffer.from
    // and `var` with const throughout.
    const pubKeyStr = "MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEDEKneqEvcqUqqFMM1HM1A4zWjJC+I8Y+aKzG5dl+6wNOHHQ4NmG2PEXRJYhujyodFH+wO0dEr4GM1WoaWog8xsYQ6mQJAC0eVpBM96spUB1eMN56+BwlJ4H3Qx4TAvAs";
    const reader = new Ber.Reader(Buffer.from(pubKeyStr, "base64"));
    reader.readSequence();
    reader.readSequence();
    reader.readOID(); // Hey, I'm an elliptic curve
    reader.readOID(); // This contains the curve type, could be useful
    // The first byte is unused, it contains the "number of unused bits in last octet"
    // The pubKey should start at "04" which signifies it is an "uncompressed" public key.
    const pubKey = Buffer.from(reader.readString(Ber.BitString, true)).slice(1);
    // It'd be better to get this from the curve type OID
    const server = crypto.createECDH('secp384r1');
    server.setPrivateKey("oH53xXsdMRt6VbjlUUggn/QTcUQUqOHcvHl+U1jaGAUe8TP9H3XdKeoqSAKrKBGG", "base64");
    const secret = server.computeSecret(pubKey);
    assert(bufferEqual(secret, Buffer.from("sM5HvG6efG0RwRe7S+Er9ingxuVzC6HIXmQ1DITVkh4GmX7pboSzbLtaTTNKE8bJ", "base64")));
  });
  it("create the secret key", () => {
    const secret = Buffer.from("sM5HvG6efG0RwRe7S+Er9ingxuVzC6HIXmQ1DITVkh4GmX7pboSzbLtaTTNKE8bJ", "base64");
    const hash = crypto.createHash('sha256');
    hash.update("SO SECRET VERY SECURE");
    hash.update(secret);
    const secretKey = hash.digest();
    const expected = Buffer.from("PN/4NCtRswMTwfpOKRecbMncwxa91Fx4QSUlad46jrc", "base64");
    assert(bufferEqual(secretKey, expected), secretKey.toString("base64") + "!=" + expected.toString("base64"));
  });
});

296
test/serialization.js Normal file

File diff suppressed because one or more lines are too long

2680
yarn.lock Normal file

File diff suppressed because it is too large Load diff