/// <summary>
/// Builds the EBICS initialisation request for this order type and returns it
/// as an authenticated (signed) XML document.
/// </summary>
/// <returns>The signed init request as an <see cref="XmlDocument"/>.</returns>
/// <exception cref="CreateRequestException">Wraps any non-EBICS failure during construction.</exception>
private XmlDocument CreateInitRequest()
{
    using (new MethodLogger(s_logger))
    {
        try
        {
            // Order details: which order is requested and the date window it covers.
            var orderDetails = new OrderDetails
            {
                Namespaces = Namespaces,
                OrderAttribute = OrderAttribute,
                OrderType = OrderType,
                StandardOrderParams = new StartEndDateOrderParams
                {
                    Namespaces = Namespaces,
                    StartDate = Params.StartDate,
                    EndDate = Params.EndDate
                }
            };

            // Static header: identifies host/partner/user and carries nonce,
            // timestamp and the bank key digests.
            var staticHeader = new StaticHeader
            {
                Namespaces = Namespaces,
                HostId = Config.User.HostId,
                PartnerId = Config.User.PartnerId,
                UserId = Config.User.UserId,
                SecurityMedium = Params.SecurityMedium,
                Nonce = CryptoUtils.GetNonce(),
                Timestamp = CryptoUtils.GetUtcTimeNow(),
                BankPubKeyDigests = new BankPubKeyDigests
                {
                    Namespaces = Namespaces,
                    Bank = Config.Bank,
                    DigestAlgorithm = s_digestAlg
                },
                OrderDetails = orderDetails
            };

            var request = new EbicsRequest
            {
                StaticHeader = staticHeader,
                MutableHeader = new MutableHeader
                {
                    Namespaces = Namespaces,
                    TransactionPhase = "Initialisation"
                },
                Body = new Body { Namespaces = Namespaces },
                Namespaces = Namespaces,
                Version = Config.Version,
                Revision = Config.Revision,
            };

            // Serialize and sign the request document.
            return AuthenticateXml(request.Serialize().ToXmlDocument(), null, null);
        }
        catch (EbicsException)
        {
            // Domain errors propagate unchanged.
            throw;
        }
        catch (Exception ex)
        {
            throw new CreateRequestException($"can't create {OrderType} init request", ex);
        }
    }
}
/// <summary>
/// Creates a secure message by encoding the payload with the given key pair.
/// </summary>
/// <param name="payload">Plain-text payload to protect.</param>
/// <param name="senderPrivateKey">Sender's private key used for encoding.</param>
/// <param name="receiverPublicKey">Receiver's public key used for encoding.</param>
/// <returns>A <see cref="SecureMessage"/> wrapping the decoded hex bytes of the encoded payload.</returns>
public static SecureMessage Create(string payload, string senderPrivateKey, string receiverPublicKey)
{
    var encodedHex = CryptoUtils.Encode(payload, senderPrivateKey, receiverPublicKey);
    return new SecureMessage(encodedHex.FromHex());
}
/// <inheritdoc />
public async Task ProvisionAgentAsync(AgentOptions agentOptions)
{
    if (agentOptions is null)
    {
        throw new ArgumentNullException(nameof(agentOptions));
    }

    // Create the agent wallet, then open it.
    await WalletService.CreateWalletAsync(
        configuration: agentOptions.WalletConfiguration,
        credentials: agentOptions.WalletCredentials);
    var agentWallet = await WalletService.GetWalletAsync(
        configuration: agentOptions.WalletConfiguration,
        credentials: agentOptions.WalletCredentials);

    // Configure the agent endpoint, if a URI was supplied.
    AgentEndpoint endpoint = null;
    if (agentOptions.EndpointUri != null)
    {
        endpoint = new AgentEndpoint { Uri = agentOptions.EndpointUri.ToString() };

        if (agentOptions.AgentKeySeed != null)
        {
            // Deterministic DID derived from the configured seed.
            var agentIdentity = await Did.CreateAndStoreMyDidAsync(agentWallet, new { seed = agentOptions.AgentKeySeed }.ToJson());
            endpoint.Did = agentIdentity.Did;
            endpoint.Verkey = new[] { agentIdentity.VerKey };
        }
        else if (agentOptions.AgentKey != null)
        {
            // Externally supplied DID / verification key.
            endpoint.Did = agentOptions.AgentDid;
            endpoint.Verkey = new[] { agentOptions.AgentKey };
        }
        else
        {
            // No seed and no key: generate a fresh random DID.
            var agentIdentity = await Did.CreateAndStoreMyDidAsync(agentWallet, "{}");
            endpoint.Did = agentIdentity.Did;
            endpoint.Verkey = new[] { agentIdentity.VerKey };
        }
    }

    var masterSecretId = await AnonCreds.ProverCreateMasterSecretAsync(agentWallet, null);

    var provisioningRecord = new ProvisioningRecord
    {
        MasterSecretId = masterSecretId,
        Endpoint = endpoint,
        Owner =
        {
            Name = agentOptions.AgentName,
            ImageUrl = agentOptions.AgentImageUri
        }
    };

    // Issuer configuration: fall back to a random seed when none was provided.
    if (agentOptions.IssuerKeySeed == null)
    {
        agentOptions.IssuerKeySeed = CryptoUtils.GetUniqueKey(32);
    }

    var issuerIdentity = await Did.CreateAndStoreMyDidAsync(
        wallet: agentWallet,
        didJson: new { did = agentOptions.IssuerDid, seed = agentOptions.IssuerKeySeed }.ToJson());

    provisioningRecord.IssuerSeed = agentOptions.IssuerKeySeed;
    provisioningRecord.IssuerDid = issuerIdentity.Did;
    provisioningRecord.IssuerVerkey = issuerIdentity.VerKey;
    provisioningRecord.TailsBaseUri = agentOptions.EndpointUri != null
        ? new Uri(new Uri(agentOptions.EndpointUri), "tails/").ToString()
        : null;
    provisioningRecord.SetTag("AgentKeySeed", agentOptions.AgentKeySeed);
    provisioningRecord.SetTag("IssuerKeySeed", agentOptions.IssuerKeySeed);

    // Persist the provisioning record in the wallet.
    await RecordService.AddAsync(agentWallet, provisioningRecord);
}
/// <summary>
/// Verifies a TPM attestation statement following W3C WebAuthn §8.3
/// (https://www.w3.org/TR/webauthn/#tpm-attestation).
/// Throws <see cref="VerificationException"/> on any failed check.
/// </summary>
public override void Verify()
{
    // 1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform
    //    CBOR decoding on it to extract the contained fields. (handled in base class)
    if (null == Sig || CBORType.ByteString != Sig.Type || 0 == Sig.GetByteString().Length)
    {
        throw new VerificationException("Invalid TPM attestation signature");
    }

    if ("2.0" != attStmt["ver"].AsString())
    {
        throw new VerificationException("FIDO2 only supports TPM 2.0");
    }

    // Verify that the public key specified by the parameters and unique fields of pubArea
    // is identical to the credentialPublicKey in the attestedCredentialData in authenticatorData
    PubArea pubArea = null;
    if (null != attStmt["pubArea"] &&
        CBORType.ByteString == attStmt["pubArea"].Type &&
        0 != attStmt["pubArea"].GetByteString().Length)
    {
        pubArea = new PubArea(attStmt["pubArea"].GetByteString());
    }

    if (null == pubArea || null == pubArea.Unique || 0 == pubArea.Unique.Length)
    {
        throw new VerificationException("Missing or malformed pubArea");
    }

    var coseKty = CredentialPublicKey[CBORObject.FromObject(COSE.KeyCommonParameter.KeyType)].AsInt32();
    if (3 == coseKty) // RSA
    {
        var coseMod = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.N)].GetByteString(); // modulus
        var coseExp = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.E)].GetByteString(); // exponent

        if (!coseMod.ToArray().SequenceEqual(pubArea.Unique.ToArray()))
        {
            throw new VerificationException("Public key mismatch between pubArea and credentialPublicKey");
        }
        // NOTE(review): assumes coseExp holds at least 3 bytes; a shorter encoded exponent
        // would raise IndexOutOfRangeException here — confirm upstream guarantees length.
        if ((coseExp[0] + (coseExp[1] << 8) + (coseExp[2] << 16)) != pubArea.Exponent)
        {
            throw new VerificationException("Public key exponent mismatch between pubArea and credentialPublicKey");
        }
    }
    else if (2 == coseKty) // ECC
    {
        var curve = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.Crv)].AsInt32();
        var X = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.X)].GetByteString();
        var Y = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.Y)].GetByteString();

        if (pubArea.EccCurve != CoseCurveToTpm[curve])
        {
            throw new VerificationException("Curve mismatch between pubArea and credentialPublicKey");
        }
        if (!pubArea.ECPoint.X.SequenceEqual(X))
        {
            throw new VerificationException("X-coordinate mismatch between pubArea and credentialPublicKey");
        }
        if (!pubArea.ECPoint.Y.SequenceEqual(Y))
        {
            throw new VerificationException("Y-coordinate mismatch between pubArea and credentialPublicKey");
        }
    }

    // Concatenate authenticatorData and clientDataHash to form attToBeSigned.
    // see data variable

    // Validate that certInfo is valid
    CertInfo certInfo = null;
    if (null != attStmt["certInfo"] &&
        CBORType.ByteString == attStmt["certInfo"].Type &&
        0 != attStmt["certInfo"].GetByteString().Length)
    {
        certInfo = new CertInfo(attStmt["certInfo"].GetByteString());
    }

    if (null == certInfo)
    {
        throw new VerificationException("CertInfo invalid parsing TPM format attStmt");
    }

    // 4a. Verify that magic is set to TPM_GENERATED_VALUE
    // Handled in CertInfo constructor, see CertInfo.Magic
    // 4b. Verify that type is set to TPM_ST_ATTEST_CERTIFY
    // Handled in CertInfo constructor, see CertInfo.Type

    // 4c. Verify that extraData is set to the hash of attToBeSigned using the hash algorithm
    //     employed in "alg"
    if (null == Alg || true != Alg.IsNumber)
    {
        throw new VerificationException("Invalid TPM attestation algorithm");
    }
    using (var hasher = CryptoUtils.GetHasher(CryptoUtils.HashAlgFromCOSEAlg(Alg.AsInt32())))
    {
        if (!hasher.ComputeHash(Data).SequenceEqual(certInfo.ExtraData))
        {
            throw new VerificationException("Hash value mismatch extraData and attToBeSigned");
        }
    }

    // 4d. Verify that attested contains a TPMS_CERTIFY_INFO structure, whose name field contains
    //     a valid Name for pubArea, as computed using the algorithm in the nameAlg field of pubArea
    using (var hasher = CryptoUtils.GetHasher(CryptoUtils.HashAlgFromCOSEAlg(certInfo.Alg)))
    {
        if (false == hasher.ComputeHash(pubArea.Raw).SequenceEqual(certInfo.AttestedName))
        {
            throw new VerificationException("Hash value mismatch attested and pubArea");
        }
    }

    // 4e. Note that the remaining fields in the "Standard Attestation Structure" [TPMv2-Part1]
    //     section 31.2, i.e., qualifiedSigner, clockInfo and firmwareVersion are ignored.
    //     These fields MAY be used as an input to risk engines.

    // 5. If x5c is present, this indicates that the attestation type is not ECDAA
    if (null != X5c && CBORType.Array == X5c.Type && 0 != X5c.Count)
    {
        if (null == X5c.Values || 0 == X5c.Values.Count ||
            CBORType.ByteString != X5c.Values.First().Type ||
            0 == X5c.Values.First().GetByteString().Length)
        {
            throw new VerificationException("Malformed x5c in TPM attestation");
        }

        // 5a. Verify the sig is a valid signature over certInfo using the attestation public key
        //     in aikCert with the algorithm specified in alg.
        var aikCert = new X509Certificate2(X5c.Values.First().GetByteString());

        var cpk = new CredentialPublicKey(aikCert, Alg.AsInt32());
        if (true != cpk.Verify(certInfo.Raw, Sig.GetByteString()))
        {
            throw new VerificationException("Bad signature in TPM with aikCert");
        }

        // 5b. Verify that aikCert meets the TPM attestation statement certificate requirements
        // https://www.w3.org/TR/webauthn/#tpm-cert-requirements
        // 5bi. Version MUST be set to 3
        if (3 != aikCert.Version)
        {
            throw new VerificationException("aikCert must be V3");
        }

        // 5bii. Subject field MUST be set to empty - they actually mean subject name
        if (0 != aikCert.SubjectName.Name.Length)
        {
            throw new VerificationException("aikCert subject must be empty");
        }

        // 5biii. The Subject Alternative Name extension MUST be set as defined in
        //        [TPMv2-EK-Profile] section 3.2.9.
        // https://www.w3.org/TR/webauthn/#tpm-cert-requirements
        (string tpmManufacturer, string tpmModel, string tpmVersion) = SANFromAttnCertExts(aikCert.Extensions);

        // From https://www.trustedcomputinggroup.org/wp-content/uploads/Credential_Profile_EK_V2.0_R14_published.pdf
        // "The issuer MUST include TPM manufacturer, TPM part number and TPM firmware version, using the directoryName
        // form within the GeneralName structure. The ASN.1 encoding is specified in section 3.1.2 TPM Device
        // Attributes. In accordance with RFC 5280[11], this extension MUST be critical if subject is empty
        // and SHOULD be non-critical if subject is non-empty"
        // Best I can figure to do for now ?
        // id:49465800 'IFX' Infinion Model and Version are empty
        if (string.Empty == tpmManufacturer || string.Empty == tpmModel || string.Empty == tpmVersion)
        {
            throw new VerificationException("SAN missing TPMManufacturer, TPMModel, or TPMVersion from TPM attestation certificate");
        }

        if (false == TPMManufacturers.Contains(tpmManufacturer))
        {
            throw new VerificationException("Invalid TPM manufacturer found parsing TPM attestation");
        }

        // 5biiii. The Extended Key Usage extension MUST contain the
        // "joint-iso-itu-t(2) internationalorganizations(23) 133 tcg-kp(8) tcg-kp-AIKCertificate(3)" OID.
        // OID is 2.23.133.8.3
        var EKU = EKUFromAttnCertExts(aikCert.Extensions, "2.23.133.8.3");
        if (!EKU)
        {
            throw new VerificationException("aikCert EKU missing tcg-kp-AIKCertificate OID");
        }

        // 5biiiii. The Basic Constraints extension MUST have the CA component set to false.
        if (IsAttnCertCACert(aikCert.Extensions))
        {
            throw new VerificationException("aikCert Basic Constraints extension CA component must be false");
        }

        // 5biiiiii. An Authority Information Access (AIA) extension with entry id-ad-ocsp and a
        // CRL Distribution Point extension [RFC5280] are both OPTIONAL as the status of many
        // attestation certificates is available through metadata services. See, for example,
        // the FIDO Metadata Service [FIDOMetadataService].
        var trustPath = X5c.Values
            .Select(x => new X509Certificate2(x.GetByteString()))
            .ToArray();

        var entry = _metadataService?.GetEntry(AuthData.AttestedCredentialData.AaGuid);

        // while conformance testing, we must reject any authenticator that we cannot get metadata for
        if (_metadataService?.ConformanceTesting() == true && null == entry)
        {
            throw new VerificationException("AAGUID not found in MDS test metadata");
        }

        if (_requireValidAttestationRoot)
        {
            // If the authenticator is listed in the metadata as one that should produce a basic
            // full attestation, build and verify the chain
            if ((entry?.MetadataStatement?.AttestationTypes.Contains((ushort)MetadataAttestationType.ATTESTATION_BASIC_FULL) ?? false) ||
                (entry?.MetadataStatement?.AttestationTypes.Contains((ushort)MetadataAttestationType.ATTESTATION_ATTCA) ?? false) ||
                (entry?.MetadataStatement?.AttestationTypes.Contains((ushort)MetadataAttestationType.ATTESTATION_HELLO) ?? false))
            {
                var attestationRootCertificates = entry.MetadataStatement.AttestationRootCertificates
                    .Select(x => new X509Certificate2(Convert.FromBase64String(x)))
                    .ToArray();

                if (false == ValidateTrustChain(trustPath, attestationRootCertificates))
                {
                    throw new VerificationException("TPM attestation failed chain validation");
                }
            }
        }

        // 5c. If aikCert contains an extension with OID 1.3.6.1.4.1.45724.1.1.4
        //     (id-fido-gen-ce-aaguid) verify that the value of this extension matches
        //     the aaguid in authenticatorData
        var aaguid = AaguidFromAttnCertExts(aikCert.Extensions);
        if ((null != aaguid) &&
            (!aaguid.SequenceEqual(Guid.Empty.ToByteArray())) &&
            (0 != AttestedCredentialData.FromBigEndian(aaguid).CompareTo(AuthData.AttestedCredentialData.AaGuid)))
        {
            throw new VerificationException(string.Format("aaguid malformed, expected {0}, got {1}", AuthData.AttestedCredentialData.AaGuid, new Guid(aaguid)));
        }
    }
    // If ecdaaKeyId is present, then the attestation type is ECDAA
    else if (null != EcdaaKeyId)
    {
        // Perform ECDAA-Verify on sig to verify that it is a valid signature over certInfo
        // https://www.w3.org/TR/webauthn/#biblio-fidoecdaaalgorithm
        throw new VerificationException("ECDAA support for TPM attestation is not yet implemented");
        // If successful, return attestation type ECDAA and the identifier of the ECDAA-Issuer public key ecdaaKeyId.
        //attnType = AttestationType.ECDAA;
        //trustPath = ecdaaKeyId;
    }
    else
    {
        throw new VerificationException("Neither x5c nor ECDAA were found in the TPM attestation statement");
    }
}
/// <summary>
/// Accepts a mined block submitted by a miner and, on success, notifies peers
/// about the newly accepted block.
/// </summary>
/// <param name="block">Mined block candidate: data hash, creation time, nonce and block hash.</param>
/// <returns>200 with a reward message on success; 400 with the error message on failure.</returns>
public IActionResult SubmitMinedBlock([FromBody] SubmitMinedBlockRequestModel block)
{
    try
    {
        // Hex fields arrive as strings; the chain expects raw bytes.
        var acceptedBlock = this.GetNodeSingleton().Chain.SubmitMinedBlock(
            CryptoUtils.HexToBytes(block.BlockDataHash),
            block.DateCreated,
            block.Nonce,
            CryptoUtils.HexToBytes(block.BlockHash));

        this.GetNodeSingleton().NotifyPeersAboutNewBlock();

        // First transaction of the accepted block carries the miner reward.
        var message = string.Format(
            "Block accepted, reward paid: {0} microcoins",
            acceptedBlock.Transactions[0].Value);
        return Ok(new { message = message });
    }
    catch (Exception ex)
    {
        return BadRequest(ex.Message);
    }
}
/// <summary>
/// Decodes the login payload: reads the JWT certificate chain and skin data from the
/// packet buffer, validates the chain against the Mojang root key, fills _playerInfo,
/// and (when enabled) sets up the session's encryption context via an ECDH handshake.
/// </summary>
protected void DecodeCert(McpeLogin message)
{
    _playerInfo = new PlayerInfo();

    // Get bytes
    byte[] buffer = message.payload;

    //Log.Debug($"Unknown byte in login packet is: {message.unknown}");

    // NOTE(review): buffer IS message.payload, so this condition is always false (dead
    // code), and the log/exception messages compare payload.Length with itself —
    // presumably one side was meant to be a different buffer; confirm intent.
    if (message.payload.Length != buffer.Length)
    {
        Log.Debug($"Wrong lenght {message.payload.Length} != {message.payload.Length}");
        throw new Exception($"Wrong lenght {message.payload.Length} != {message.payload.Length}");
    }

    // Decompress bytes
    Log.Debug("Lenght: " + message.payload.Length + ", Message: " + Convert.ToBase64String(buffer));

    //MemoryStream stream = new MemoryStream(buffer);
    //if (stream.ReadByte() != 0x78)
    //{
    //	throw new InvalidDataException("Incorrect ZLib header. Expected 0x78 0x9C");
    //}
    //stream.ReadByte();

    string certificateChain;
    string skinData;

    //using (var defStream2 = new DeflateStream(stream, CompressionMode.Decompress, false))
    {
        // Get actual package out of bytes
        //using (MemoryStream destination = MiNetServer.MemoryStreamManager.GetStream())
        {
            //defStream2.CopyTo(destination);
            var destination = new MemoryStream(buffer);
            destination.Position = 0;
            NbtBinaryReader reader = new NbtBinaryReader(destination, false);

            try
            {
                // Payload layout: [int32 certLen][cert bytes][int32 skinLen][skin bytes]
                var countCertData = reader.ReadInt32();
                Log.Debug("Count cert: " + countCertData);
                certificateChain = Encoding.UTF8.GetString(reader.ReadBytes(countCertData));
                Log.Debug("Decompressed certificateChain " + certificateChain);

                var countSkinData = reader.ReadInt32();
                Log.Debug("Count skin: " + countSkinData);
                skinData = Encoding.UTF8.GetString(reader.ReadBytes(countSkinData));
                Log.Debug("Decompressed skinData" + skinData);
            }
            catch (Exception e)
            {
                // Malformed payload: abort login parsing entirely.
                Log.Error("Parsing login", e);
                return;
            }
        }
    }

    try
    {
        {
            // --- Skin JWT: extract client/device info and skin texture ---
            if (Log.IsDebugEnabled) { Log.Debug("Input SKIN string: " + skinData); }

            IDictionary<string, dynamic> headers = JWT.Headers(skinData);
            dynamic payload = JObject.Parse(JWT.Payload(skinData));

            if (Log.IsDebugEnabled) { Log.Debug($"Skin JWT Header: {string.Join(";", headers)}"); }
            if (Log.IsDebugEnabled) { Log.Debug($"Skin JWT Payload:\n{payload.ToString()}"); }

            // Example skin JWT payload (SkinData base64 elided for brevity):
            //{
            //  "ADRole": 2,
            //  "ClientRandomId": 1423700530444426768,
            //  "CurrentInputMode": 1,
            //  "DefaultInputMode": 1,
            //  "DeviceModel": "ASUSTeK COMPUTER INC. N550JK",
            //  "DeviceOS": 7,
            //  "GameVersion": "1.1.0",
            //  "GuiScale": 0,
            //  "LanguageCode": "en_US",
            //  "ServerAddress": "192.168.0.3:19132",
            //  "SkinData": "<base64 RGBA skin texture, elided>",
            //  "SkinId": "Standard_Custom",
            //  "TenantId": "",
            //  "UIProfile": 0
            //}
            try
            {
                _playerInfo.ADRole = payload.ADRole;
                _playerInfo.ClientId = payload.ClientRandomId;
                _playerInfo.CurrentInputMode = payload.CurrentInputMode;
                _playerInfo.DefaultInputMode = payload.DefaultInputMode;
                _playerInfo.DeviceModel = payload.DeviceModel;
                _playerInfo.DeviceOS = payload.DeviceOS;
                _playerInfo.GameVersion = payload.GameVersion;
                _playerInfo.GuiScale = payload.GuiScale;
                _playerInfo.LanguageCode = payload.LanguageCode;
                _playerInfo.ServerAddress = payload.ServerAddress;
                _playerInfo.UIProfile = payload.UIProfile;
                _playerInfo.Skin = new Skin()
                {
                    SkinType = payload.SkinId,
                    Texture = Convert.FromBase64String((string)payload.SkinData),
                };
            }
            catch (Exception e)
            {
                // Skin parsing is best-effort; login continues without it.
                Log.Error("Skin info", e);
            }
        }

        {
            // --- Certificate chain: walk JWTs, validating each against the previous key ---
            if (Log.IsDebugEnabled) { Log.Debug("Input JSON string: " + certificateChain); }

            dynamic json = JObject.Parse(certificateChain);
            if (Log.IsDebugEnabled) { Log.Debug($"JSON:\n{json}"); }

            string validationKey = null;
            foreach (dynamic o in json.chain)
            {
                IDictionary<string, dynamic> headers = JWT.Headers(o.ToString());

                if (Log.IsDebugEnabled)
                {
                    Log.Debug("Raw chain element:\n" + o.ToString());
                    Log.Debug($"JWT Header: {string.Join(";", headers)}");
                    dynamic jsonPayload = JObject.Parse(JWT.Payload(o.ToString()));
                    Log.Debug($"JWT Payload:\n{jsonPayload}");
                }

                // x5u cert (string): MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8ELkixyLcwlZryUQcu1TvPOmI2B7vX83ndnWRUaXm74wFfa5f/lwQNTfrLVHa2PmenpGI6JhIMUJaWZrjmMj90NoKNFSNBuKdm8rYiXsfaz3K36x/1U26HpG0ZxK/V1V
                if (headers.ContainsKey("x5u"))
                {
                    string certString = headers["x5u"];

                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug($"x5u cert (string): {certString}");
                        ECDiffieHellmanPublicKey publicKey = CryptoUtils.CreateEcDiffieHellmanPublicKey(certString);
                        Log.Debug($"Cert:\n{publicKey.ToXmlString()}");
                    }

                    // Validate
                    CngKey newKey = CryptoUtils.ImportECDsaCngKeyFromString(certString);
                    CertificateData data = JWT.Decode<CertificateData>(o.ToString(), newKey);

                    if (data != null)
                    {
                        if (Log.IsDebugEnabled) { Log.Debug("Decoded token success"); }

                        if (CertificateData.MojangRootKey.Equals(certString, StringComparison.InvariantCultureIgnoreCase))
                        {
                            // Root of trust: remember the key the next element must be signed by.
                            Log.Debug("Got Mojang key. Is valid = " + data.CertificateAuthority);
                            validationKey = data.IdentityPublicKey;
                        }
                        else if (validationKey != null && validationKey.Equals(certString, StringComparison.InvariantCultureIgnoreCase))
                        {
                            _playerInfo.CertificateData = data;
                        }
                        else
                        {
                            if (data.ExtraData == null) { continue; }

                            // Self signed, make sure they don't fake XUID
                            if (data.ExtraData.Xuid != null)
                            {
                                Log.Warn("Received fake XUID from " + data.ExtraData.DisplayName);
                                data.ExtraData.Xuid = null;
                            }

                            _playerInfo.CertificateData = data;
                        }
                    }
                    else
                    {
                        Log.Error("Not a valid Identity Public Key for decoding");
                    }
                }
            }

            //TODO: Implement disconnect here

            {
                _playerInfo.Username = _playerInfo.CertificateData.ExtraData.DisplayName;
                _session.Username = _playerInfo.Username;
                string identity = _playerInfo.CertificateData.ExtraData.Identity;

                if (Log.IsDebugEnabled) { Log.Debug($"Connecting user {_playerInfo.Username} with identity={identity}"); }
                _playerInfo.ClientUuid = new UUID(identity);

                // Encrypt when forced globally, or when enabled and the client has an XUID.
                _session.CryptoContext = new CryptoContext
                {
                    UseEncryption = Config.GetProperty("UseEncryptionForAll", false) || (Config.GetProperty("UseEncryption", true) && !string.IsNullOrWhiteSpace(_playerInfo.CertificateData.ExtraData.Xuid)),
                };

                if (_session.CryptoContext.UseEncryption)
                {
                    ECDiffieHellmanPublicKey publicKey = CryptoUtils.CreateEcDiffieHellmanPublicKey(_playerInfo.CertificateData.IdentityPublicKey);
                    if (Log.IsDebugEnabled) { Log.Debug($"Cert:\n{publicKey.ToXmlString()}"); }

                    // Create shared shared secret
                    ECDiffieHellmanCng ecKey = new ECDiffieHellmanCng(384);
                    ecKey.HashAlgorithm = CngAlgorithm.Sha256;
                    ecKey.KeyDerivationFunction = ECDiffieHellmanKeyDerivationFunction.Hash;
                    ecKey.SecretPrepend = Encoding.UTF8.GetBytes("RANDOM SECRET"); // Server token

                    byte[] secret = ecKey.DeriveKeyMaterial(publicKey);

                    if (Log.IsDebugEnabled) { Log.Debug($"SECRET KEY (b64):\n{Convert.ToBase64String(secret)}"); }

                    {
                        // NOTE(review): IV is the first 16 bytes of the shared secret, i.e. it is
                        // derived from the key itself — presumably mandated by the protocol's wire
                        // format, but worth confirming against the client implementation.
                        RijndaelManaged rijAlg = new RijndaelManaged
                        {
                            BlockSize = 128,
                            Padding = PaddingMode.None,
                            Mode = CipherMode.CFB,
                            FeedbackSize = 8,
                            Key = secret,
                            IV = secret.Take(16).ToArray(),
                        };

                        // Create a decryptor to perform the stream transform.
                        ICryptoTransform decryptor = rijAlg.CreateDecryptor(rijAlg.Key, rijAlg.IV);
                        MemoryStream inputStream = new MemoryStream();
                        CryptoStream cryptoStreamIn = new CryptoStream(inputStream, decryptor, CryptoStreamMode.Read);

                        ICryptoTransform encryptor = rijAlg.CreateEncryptor(rijAlg.Key, rijAlg.IV);
                        MemoryStream outputStream = new MemoryStream();
                        CryptoStream cryptoStreamOut = new CryptoStream(outputStream, encryptor, CryptoStreamMode.Write);

                        _session.CryptoContext.Algorithm = rijAlg;
                        _session.CryptoContext.Decryptor = decryptor;
                        _session.CryptoContext.Encryptor = encryptor;
                        _session.CryptoContext.InputStream = inputStream;
                        _session.CryptoContext.OutputStream = outputStream;
                        _session.CryptoContext.CryptoStreamIn = cryptoStreamIn;
                        _session.CryptoContext.CryptoStreamOut = cryptoStreamOut;
                    }

                    // Send the server's public key and token so the client can derive the same secret.
                    var response = McpeServerToClientHandshake.CreateObject();
                    response.NoBatch = true;
                    response.ForceClear = true;
                    response.serverPublicKey = Convert.ToBase64String(ecKey.PublicKey.GetDerEncoded());
                    response.tokenLength = (short)ecKey.SecretPrepend.Length;
                    response.token = ecKey.SecretPrepend;

                    _session.SendPackage(response);

                    if (Log.IsDebugEnabled) { Log.Warn($"Encryption enabled for {_session.Username}"); }
                }
            }
        }

        // Without encryption there is no client handshake response coming; advance the
        // state machine ourselves.
        if (!_session.CryptoContext.UseEncryption)
        {
            _session.MessageHandler.HandleMcpeClientToServerHandshake(null);
        }
    }
    catch (Exception e)
    {
        Log.Error("Decrypt", e);
    }
}
/// <summary>
/// Handles a client's handshake request: verifies the connection was registered as pending,
/// checks mod/game version compatibility, restores saved player data keyed by the client
/// certificate hash, notifies already-connected players of the join, and replies with a
/// <c>HandshakeResponse</c> carrying the host's game settings.
/// </summary>
/// <param name="packet">Handshake payload (mod version, game version signature, client certificate, username).</param>
/// <param name="conn">Connection the packet arrived on; disconnected on any validation failure.</param>
public void ProcessPacket(HandshakeRequest packet, NebulaConnection conn)
{
    Player player;
    // The connection must have been added to the pending list on connect;
    // a handshake from any other connection is a protocol violation.
    using (playerManager.GetPendingPlayers(out var pendingPlayers))
    {
        if (!pendingPlayers.TryGetValue(conn, out player))
        {
            conn.Disconnect(DisconnectionReason.InvalidData);
            Log.Warn("WARNING: Player tried to handshake without being in the pending list");
            return;
        }

        pendingPlayers.Remove(conn);
    }

    // Reject incompatible clients. The reason string carries "<client>;<host>" so the
    // client can display both versions.
    if (packet.ModVersion != Config.ModVersion)
    {
        conn.Disconnect(DisconnectionReason.ModVersionMismatch, $"{ packet.ModVersion };{ Config.ModVersion }");
        return;
    }

    if (packet.GameVersionSig != GameConfig.gameVersion.sig)
    {
        conn.Disconnect(DisconnectionReason.GameVersionMismatch, $"{ packet.GameVersionSig };{ GameConfig.gameVersion.sig }");
        return;
    }

    SimulatedWorld.OnPlayerJoining();

    //TODO: some validation of client cert / generating auth challenge for the client

    // Load old data of the client. Saved data is keyed by a hash of the client certificate
    // so the same client gets its previous state back on reconnect.
    string clientCertHash = CryptoUtils.Hash(packet.ClientCert);
    using (playerManager.GetSavedPlayerData(out var savedPlayerData))
    {
        if (savedPlayerData.TryGetValue(clientCertHash, out var value))
        {
            player.LoadUserData(value);
        }
        else
        {
            savedPlayerData.Add(clientCertHash, player.Data);
        }
    }

    // Add the username to the player data
    player.Data.Username = packet.Username;

    // Make sure that each player that is currently in the game receives that a new player
    // has joined so they can create its RemotePlayerCharacter
    PlayerJoining pdata = new PlayerJoining(player.Data.CreateCopyWithoutMechaData()); // Remove inventory from mecha data
    using (playerManager.GetConnectedPlayers(out var connectedPlayers))
    {
        foreach (var kvp in connectedPlayers)
        {
            kvp.Value.SendPacket(pdata);
        }
    }

    // Add the new player to the list of players still synchronizing world state.
    using (playerManager.GetSyncingPlayers(out var syncingPlayers))
    {
        syncingPlayers.Add(conn, player);
    }

    // Add current tech bonuses to the connecting player based on the Host's mecha
    player.Data.Mecha.TechBonuses = new PlayerTechBonuses(GameMain.mainPlayer.mecha);

    var gameDesc = GameMain.data.gameDesc;
    player.SendPacket(new HandshakeResponse(gameDesc.galaxyAlgo, gameDesc.galaxySeed, gameDesc.starCount, gameDesc.resourceMultiplier, player.Data));
}
/// <summary>
/// Entry point for inbound packets. An <c>McpeWrapper</c> carries a (possibly encrypted)
/// deflate-compressed batch of game packets: the payload is decrypted when encryption is
/// active, decompressed, split into individual length-prefixed packets, and each one is
/// dispatched to <c>HandleGamePacket</c>. Unknown and unhandled packets are logged.
/// </summary>
/// <param name="message">The raw packet received from the transport layer.</param>
public void HandlePacket(Packet message)
{
    if (message is McpeWrapper wrapper)
    {
        var messages = new LinkedList<Packet>();

        // Get bytes to process
        var payload = wrapper.payload.ToArray();

        // Decrypt bytes
        if (CryptoContext != null && CryptoContext.UseEncryption)
        {
            // Signal any waiter that the first encrypted packet has arrived.
            FirstEncryptedPacketWaitHandle.Set();
            payload = CryptoUtils.Decrypt(payload, CryptoContext);
            _hasEncrypted = true;
        }

        //var stream = new MemoryStreamReader(payload);
        // Wrapper payload is always deflate-compressed; inflate it fully before parsing.
        using (var deflateStream = new DeflateStream(new MemoryStream(payload), System.IO.Compression.CompressionMode.Decompress, false))
        {
            using var s = new MemoryStream();
            deflateStream.CopyTo(s);
            s.Position = 0;

            int count = 0;
            // Get actual packet out of bytes. Each packet is a VarInt length prefix
            // followed by a VarInt packet id and the packet body.
            while (s.Position < s.Length)
            {
                count++;
                uint len = VarInt.ReadUInt32(s);
                long pos = s.Position;
                // Slice the packet body directly out of the stream's backing buffer (no copy).
                ReadOnlyMemory<byte> internalBuffer = s.GetBuffer().AsMemory((int) s.Position, (int) len);
                int id = VarInt.ReadInt32(s);
                Packet packet = null;
                try
                {
                    // Fall back to UnknownPacket so unrecognized ids still flow through the pipeline.
                    packet = PacketFactory.Create((byte) id, internalBuffer, "mcpe") ?? new UnknownPacket((byte) id, internalBuffer);

                    //Hack for some servers that screw up the order.
                    // if (packet is McpePlayerList)
                    // {
                    //  messages.AddFirst(packet);
                    // }
                    // else
                    {
                        messages.AddLast(packet);
                    }

                    //var a = 0x91;
                }
                catch (Exception e)
                {
                    Log.Warn(e, $"Error parsing bedrock message #{count} id={id}\n{Packet.HexDump(internalBuffer)}");
                    //throw;
                    return; // Exit, but don't crash.
                }

                // Advance past the packet body regardless of how much the parser consumed.
                s.Position = pos + len;
            }

            if (s.Length > s.Position)
            {
                throw new Exception("Have more data");
            }
        }

        //var msgs = messages.ToArray();
        //messages.Clear();
        foreach (Packet msg in messages)
        {
            // Propagate the wrapper's reliability metadata onto every contained packet.
            msg.ReliabilityHeader = new ReliabilityHeader()
            {
                Reliability = wrapper.ReliabilityHeader.Reliability,
                ReliableMessageNumber = wrapper.ReliabilityHeader.ReliableMessageNumber,
                OrderingChannel = wrapper.ReliabilityHeader.OrderingChannel,
                OrderingIndex = wrapper.ReliabilityHeader.OrderingIndex,
                SequencingIndex = wrapper.ReliabilityHeader.SequencingIndex
            };

            try
            {
                HandleGamePacket(msg);
            }
            catch (Exception e)
            {
                // A single bad packet must not take down the whole batch.
                Log.Warn(e, $"Bedrock message handler error");
            }
        }

        wrapper.PutPool();
    }
    else if (message is UnknownPacket unknownPacket)
    {
        if (Log.IsDebugEnabled)
        {
            Log.Warn($"Received unknown packet 0x{unknownPacket.Id:X2}\n{Packet.HexDump(unknownPacket.Message)}");
        }

        unknownPacket.PutPool();
    }
    else
    {
        Log.Error($"Unhandled packet: {message.GetType().Name} 0x{message.Id:X2} for user: {_session.Username}, IP {_session.EndPoint.Address}");
        if (Log.IsDebugEnabled)
        {
            Log.Warn($"Unknown packet 0x{message.Id:X2}\n{Packet.HexDump(message.Bytes)}");
        }
    }
}
/// <summary>
/// Moves (or links) a set of records and/or folders into another folder.
/// </summary>
/// <param name="vault">Vault connected to Keeper.</param>
/// <param name="objects">Objects to move. An entry with an empty <c>RecordUid</c> moves the whole folder identified by <c>FolderUid</c>.</param>
/// <param name="toFolderUid">UID of the destination folder.</param>
/// <param name="link">When <c>true</c>, objects are linked instead of moved.</param>
/// <exception cref="VaultException">Destination shared folder not found, record not found, moving the root folder, moving a folder into its own subtree, or moving a shared folder into another shared folder.</exception>
public static async Task MoveToFolder(this VaultOnline vault, IEnumerable<RecordPath> objects, string toFolderUid, bool link = false)
{
    var destinationFolder = vault.GetFolder(toFolderUid);
    // "Scope" is the shared-folder UID a folder belongs to, or "" for personal folders.
    // Records that change scope need their record key re-encrypted with the destination key.
    var destinationFolderScope = destinationFolder.FolderType != FolderType.UserFolder
        ? destinationFolder.FolderType == FolderType.SharedFolderFolder
            ? destinationFolder.SharedFolderUid
            : destinationFolder.FolderUid
        : "";

    var encryptionKey = vault.Auth.AuthContext.DataKey;
    if (!string.IsNullOrEmpty(destinationFolderScope))
    {
        if (!vault.TryGetSharedFolder(destinationFolderScope, out var sf))
        {
            throw new VaultException($"Cannot find destination shared folder");
        }

        encryptionKey = sf.SharedFolderKey;
    }

    var moveObjects = new List<MoveObject>();
    var keyObjects = new Dictionary<string, TransitionKey>();

    // Recursively collects a transition key for every record under "folder" that changes scope.
    void TraverseFolderForRecords(FolderNode folder)
    {
        if (folder.FolderType == FolderType.SharedFolder && destinationFolder.FolderType != FolderType.UserFolder)
        {
            throw new VaultException($"Cannot move shared folder \"{folder.Name}\" to another shared folder");
        }

        var scope = folder.FolderType != FolderType.UserFolder
            ? folder.FolderType == FolderType.SharedFolderFolder
                ? folder.SharedFolderUid
                : destinationFolder.FolderUid
            : "";
        if (scope != destinationFolderScope)
        {
            foreach (var recordUid in folder.Records)
            {
                if (keyObjects.ContainsKey(recordUid))
                {
                    continue;
                }

                // BUG FIX: the condition was inverted; the record key can only be
                // re-encrypted when the record is actually found. The old code used
                // a null `record` on the lookup-failure path.
                if (vault.TryGetRecord(recordUid, out var record))
                {
                    keyObjects.Add(recordUid, new TransitionKey
                    {
                        uid = recordUid,
                        key = CryptoUtils.EncryptAesV1(record.RecordKey, encryptionKey).Base64UrlEncode(),
                    });
                }
            }
        }

        foreach (var fUid in folder.Subfolders)
        {
            TraverseFolderForRecords(vault.GetFolder(fUid));
        }
    }

    foreach (var mo in objects)
    {
        var sourceFolder = vault.GetFolder(mo.FolderUid);
        if (string.IsNullOrEmpty(mo.RecordUid)) // move folder
        {
            if (string.IsNullOrEmpty(sourceFolder.ParentUid))
            {
                throw new VaultException("Cannot move root folder");
            }

            // Reject moving a folder into its own subtree by walking up from the destination.
            var f = destinationFolder;
            while (!string.IsNullOrEmpty(f.ParentUid))
            {
                if (f.FolderUid == sourceFolder.FolderUid)
                {
                    throw new VaultException($"Cannot move the folder into its subfolder.");
                }

                f = vault.GetFolder(f.ParentUid);
            }

            TraverseFolderForRecords(sourceFolder);

            var parentFolder = vault.GetFolder(sourceFolder.ParentUid);
            moveObjects.Add(new MoveObject
            {
                // BUG FIX: a folder move originates from the folder's PARENT and moves the
                // folder itself. The old code sent the source folder as `fromUid` and
                // `mo.RecordUid` as `uid`, which is always empty in this branch.
                fromUid = string.IsNullOrEmpty(parentFolder.FolderUid) ? null : parentFolder.FolderUid,
                fromType = parentFolder.FolderType.GetFolderTypeText(),
                uid = sourceFolder.FolderUid,
                type = sourceFolder.FolderType.GetFolderTypeText(),
                cascade = true,
            });
        }
        else
        {
            if (!vault.TryGetRecord(mo.RecordUid, out var record))
            {
                // BUG FIX: the exception previously carried an empty message.
                throw new VaultException($"Cannot find record \"{mo.RecordUid}\"");
            }

            var scope = sourceFolder.FolderType != FolderType.UserFolder
                ? sourceFolder.FolderType == FolderType.SharedFolderFolder
                    ? sourceFolder.SharedFolderUid
                    : sourceFolder.FolderUid
                : "";
            if (scope != destinationFolderScope && !keyObjects.ContainsKey(mo.RecordUid))
            {
                keyObjects.Add(mo.RecordUid, new TransitionKey
                {
                    uid = mo.RecordUid,
                    key = CryptoUtils.EncryptAesV1(record.RecordKey, encryptionKey).Base64UrlEncode(),
                });
            }

            moveObjects.Add(new MoveObject
            {
                fromUid = string.IsNullOrEmpty(sourceFolder.FolderUid) ? null : sourceFolder.FolderUid,
                fromType = sourceFolder.FolderType.GetFolderTypeText(),
                uid = mo.RecordUid,
                type = "record",
                cascade = false
            });
        }
    }

    var request = new MoveCommand
    {
        toUid = destinationFolder.FolderUid,
        toType = destinationFolder.FolderType.GetFolderTypeText(),
        isLink = link,
        moveObjects = moveObjects.ToArray(),
        transitionKeys = keyObjects.Count == 0 ? null : keyObjects.Values.ToArray(),
    };
    await vault.Auth.ExecuteAuthCommand(request);
}
/// <summary>
/// Announces a SHA3-256 secret lock transaction (Alice locks 10 units of a fresh mosaic
/// for Bob for up to 100 blocks) and then reveals it with the matching secret proof
/// transaction signed by Bob.
/// </summary>
public async Task Should_Announce_Secret_Hash()
{
    var bob = await Fixture.GenerateAccountWithCurrency(10000);
    var alice = await Fixture.GenerateAccountWithCurrency(10000);

    Log.WriteLine($"Bob Account Address: {bob.Address.Plain} \r\n Private Key: {bob.PrivateKey} \r\n Public Key {bob.PublicKey}");
    Log.WriteLine($"Alice Account Address: {alice.Address.Plain} \r\n Private Key: {alice.PrivateKey} \r\n Public Key {alice.PublicKey}");

    // BUG FIX: the random proof bytes must be generated BEFORE the secret is derived
    // from them. Previously the secret was computed from an all-zero seed and the seed
    // was randomized afterwards, so the announced secret never matched the proof.
    byte[] seed = new byte[40];
    using (var csprng = new RNGCryptoServiceProvider())
    {
        csprng.GetNonZeroBytes(seed);
    }

    // proof = hex(seed); secret = hex(SHA3-256(seed))
    var proof = BitConverter.ToString(seed).Replace("-", "");
    var secret = BitConverter.ToString(CryptoUtils.Sha3_256(seed)).Replace("-", "").ToLowerInvariant();

    var nonce = MosaicNonce.CreateRandom();
    var mosaic_id = MosaicId.CreateFromNonce(nonce, Fixture.GenerationHash);

    var secretLockTransaction = SecretLockTransaction.Create(
        Deadline.Create(),
        new Mosaic(mosaic_id, 10),
        (ulong) 100,
        HashType.SHA3_256,
        secret,
        bob.Address,
        NetworkType.TEST_NET);

    var signedTransaction = alice.Sign(secretLockTransaction, Fixture.GenerationHash);
    Fixture.WatchForFailure(signedTransaction);
    Log.WriteLine($"Going to announce transaction {signedTransaction.Hash}");
    Log.WriteLine($"Proof {proof}");
    Log.WriteLine($"Secret {secret}");
    Log.WriteLine($"proof length {proof.DecodeHexString().Length}");
    await Fixture.SiriusClient.TransactionHttp.Announce(signedTransaction);

    // BUG FIX: the fifth argument is the PROOF, not the secret repeated; the proof is
    // what the network hashes and compares against the locked secret.
    var secretProofTransaction = SecretProofTransaction.Create(
        Deadline.Create(),
        HashType.SHA3_256,
        Recipient.From(bob.Address),
        secret,
        proof,
        Fixture.NetworkType);

    var secretProofsignedTransaction = bob.Sign(secretProofTransaction, Fixture.GenerationHash);
    Fixture.WatchForFailure(secretProofsignedTransaction);

    // Give the network time to confirm the lock before announcing the proof.
    Thread.Sleep(8000);
    await Fixture.SiriusClient.TransactionHttp.Announce(secretProofsignedTransaction);
    Thread.Sleep(8000);
    Log.WriteLine($"Going to announce Secret Proof Transaction {secretProofsignedTransaction.Hash}");

    // NOTE(review): the lock uses NetworkType.TEST_NET while the proof uses
    // Fixture.NetworkType — confirm these are the same network in the fixture.
}
/// <summary>
/// Decrypts, decompresses and verifies a single Cloud Sync encrypted file.
/// </summary>
/// <param name="sourcePath">Path of the encrypted source file.</param>
/// <param name="destDir">Directory the decrypted file is written into.</param>
/// <param name="respectFileNameInMeta">When true, the output file name comes from the file's metadata; otherwise the source file name is reused.</param>
/// <returns>true when the file decrypted and its MD5 content hash verified; false on any error (the error is routed to the exception handler and a partial output file is deleted).</returns>
public bool ProcessFile(string sourcePath, string destDir, bool respectFileNameInMeta = true)
{
    string destPath = null;
    try
    {
        FileItem fi = new FileItem(sourcePath);
        using (CloudSyncFile cloudSyncFile = new CloudSyncFile(fi, _handlerFactory))
        {
            cloudSyncFile.InitParsing();
            FileMeta3 fileMeta = cloudSyncFile.GetFileMeta();

            // Recover the per-file session key with our RSA-OAEP private key and verify it
            // against the salted-MD5 hash stored in the metadata.
            byte[] sessionKeyComputed = CryptoUtils.RsaOaepDeciper(fileMeta.EncKey2, this._cloudSyncKey.KeyPair.Private);
            string sessionKeyHashStrComputed = CryptoUtils.SaltedMd5(
                fileMeta.SessionKeyHash.Substring(0, 10), sessionKeyComputed);
            if (!fileMeta.SessionKeyHash.Equals(sessionKeyHashStrComputed))
            {
                throw new InvalidDataException($"File {fi.Name}, Computed session key is incorrect.");
            }

            // The decrypted session key is an ASCII hex string; derive the AES key/IV from it.
            byte[] sessionKeyBytes = BytesUtils.HexStringToByteArray(
                Encoding.ASCII.GetString(sessionKeyComputed));
            ParametersWithIV keys = CryptoUtils.DeriveAESKeyParameters(sessionKeyBytes, null);
            AesCbcCryptor decryptor = new AesCbcCryptor(((KeyParameter) keys.Parameters).GetKey(), keys.GetIV());

            destPath = Path.Join(destDir, respectFileNameInMeta ? fileMeta.FileName : Path.GetFileName(sourcePath));

            using (var hasher = MD5.Create())
            {
                using (AesCryptoServiceProvider aes = new AesCryptoServiceProvider())
                {
                    aes.Mode = CipherMode.CBC;
                    aes.Key = ((KeyParameter) keys.Parameters).GetKey();
                    aes.IV = keys.GetIV();

                    ICryptoTransform decoder = aes.CreateDecryptor();

                    // Pipeline: encrypted data blocks -> AES-CBC decrypt -> LZ4 decompress
                    // -> destination file, hashing the plaintext on the way out so the
                    // content hash can be verified afterwards.
                    // BUG FIX: FileMode.Create (was OpenOrCreate). OpenOrCreate does not
                    // truncate an existing destination file, leaving stale trailing bytes
                    // when the new content is shorter.
                    using (CloudSyncPayloadStream cspls = new CloudSyncPayloadStream(cloudSyncFile.GetDataBlocks(decryptor)))
                    using (CryptoStream aesStream = new CryptoStream(cspls, decoder, CryptoStreamMode.Read))
                    using (LZ4DecoderStream lz4ds = LZ4Stream.Decode(aesStream))
                    using (FileStream writeFs = new FileStream(destPath, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite, 1024 * 1024))
                    using (CryptoStream md5HashStream = new CryptoStream(writeFs, hasher, CryptoStreamMode.Write))
                    {
                        lz4ds.CopyTo(md5HashStream, 1024 * 1024);
                    }

                    if (!cloudSyncFile.VerifyContentHash(hasher.Hash))
                    {
                        throw new InvalidDataException("File Md5 doesn't match.");
                    }
                }
            }

            return (true);
        }
    }
    catch (Exception ex)
    {
        // Best-effort cleanup of a partially written destination file.
        if (File.Exists(destPath))
        {
            File.Delete(destPath);
        }

        this._exceptionHandler.Handle(ex);
    }

    return (false);
}
/// <summary>
/// Performs the final merge on the reduce side: optionally spills in-memory map outputs
/// to disk to satisfy the reduce memory limit, builds segment lists for the remaining
/// in-memory and on-disk outputs, and returns an iterator over the fully merged,
/// sorted key/value stream that feeds the reduce phase.
/// </summary>
/// <param name="job">Job configuration (key/value classes, comparator, sort factor).</param>
/// <param name="fs">File system holding the on-disk map outputs.</param>
/// <param name="inMemoryMapOutputs">Map outputs currently held in memory.</param>
/// <param name="onDiskMapOutputs">Map outputs already spilled to disk; may gain one more entry if memory must be vacated.</param>
/// <returns>An iterator over the merged output of all segments.</returns>
/// <exception cref="System.IO.IOException"/>
private RawKeyValueIterator FinalMerge(JobConf job, FileSystem fs, IList<InMemoryMapOutput<K, V>> inMemoryMapOutputs, IList<MergeManagerImpl.CompressAwarePath> onDiskMapOutputs)
{
    Log.Info("finalMerge called with " + inMemoryMapOutputs.Count + " in-memory map-outputs and " + onDiskMapOutputs.Count + " on-disk map-outputs");
    long maxInMemReduce = GetMaxInMemReduceLimit();
    // merge config params
    Type keyClass = (Type)job.GetMapOutputKeyClass();
    Type valueClass = (Type)job.GetMapOutputValueClass();
    bool keepInputs = job.GetKeepFailedTaskFiles();
    Path tmpDir = new Path(reduceId.ToString());
    RawComparator<K> comparator = (RawComparator<K>)job.GetOutputKeyComparator();
    // segments required to vacate memory
    IList<Merger.Segment<K, V>> memDiskSegments = new AList<Merger.Segment<K, V>>();
    long inMemToDiskBytes = 0;
    bool mergePhaseFinished = false;
    if (inMemoryMapOutputs.Count > 0)
    {
        TaskID mapId = inMemoryMapOutputs[0].GetMapId().GetTaskID();
        inMemToDiskBytes = CreateInMemorySegments(inMemoryMapOutputs, memDiskSegments, maxInMemReduce);
        int numMemDiskSegments = memDiskSegments.Count;
        if (numMemDiskSegments > 0 && ioSortFactor > onDiskMapOutputs.Count)
        {
            // If we reach here, it implies that we have less than io.sort.factor
            // disk segments and this will be incremented by 1 (result of the
            // memory segments merge). Since this total would still be
            // <= io.sort.factor, we will not do any more intermediate merges,
            // the merge of all these disk segments would be directly fed to the
            // reduce method
            mergePhaseFinished = true;
            // must spill to disk, but can't retain in-mem for intermediate merge
            Path outputPath = mapOutputFile.GetInputFileForWrite(mapId, inMemToDiskBytes).Suffix(Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix);
            RawKeyValueIterator rIter = Merger.Merge(job, fs, keyClass, valueClass, memDiskSegments, numMemDiskSegments, tmpDir, comparator, reporter, spilledRecordsCounter, null, mergePhase);
            // Output stream is transparently encrypted when intermediate-data encryption is on.
            FSDataOutputStream @out = CryptoUtils.WrapIfNecessary(job, fs.Create(outputPath));
            IFile.Writer<K, V> writer = new IFile.Writer<K, V>(job, @out, keyClass, valueClass, codec, null, true);
            try
            {
                Merger.WriteFile(rIter, writer, reporter, job);
                writer.Close();
                // add to list of final disk outputs.
                onDiskMapOutputs.AddItem(new MergeManagerImpl.CompressAwarePath(outputPath, writer.GetRawLength(), writer.GetCompressedLength()));
                // null signals the finally block that the writer was closed successfully.
                writer = null;
            }
            catch (IOException e)
            {
                // Clean up the partially written spill file before propagating.
                if (null != outputPath)
                {
                    try
                    {
                        fs.Delete(outputPath, true);
                    }
                    catch (IOException)
                    {
                    }
                }
                // NOTHING
                throw;
            }
            finally
            {
                if (null != writer)
                {
                    writer.Close();
                }
            }
            Log.Info("Merged " + numMemDiskSegments + " segments, " + inMemToDiskBytes + " bytes to disk to satisfy " + "reduce memory limit");
            inMemToDiskBytes = 0;
            memDiskSegments.Clear();
        }
        else
        {
            if (inMemToDiskBytes != 0)
            {
                Log.Info("Keeping " + numMemDiskSegments + " segments, " + inMemToDiskBytes + " bytes in memory for " + "intermediate, on-disk merge");
            }
        }
    }
    // segments on disk
    IList<Merger.Segment<K, V>> diskSegments = new AList<Merger.Segment<K, V>>();
    long onDiskBytes = inMemToDiskBytes;
    long rawBytes = inMemToDiskBytes;
    MergeManagerImpl.CompressAwarePath[] onDisk = Sharpen.Collections.ToArray(onDiskMapOutputs, new MergeManagerImpl.CompressAwarePath[onDiskMapOutputs.Count]);
    foreach (MergeManagerImpl.CompressAwarePath file in onDisk)
    {
        long fileLength = fs.GetFileStatus(file).GetLen();
        onDiskBytes += fileLength;
        rawBytes += (file.GetRawDataLength() > 0) ? file.GetRawDataLength() : fileLength;
        Log.Debug("Disk file: " + file + " Length is " + fileLength);
        // Files we produced ourselves (merged-output prefix) are not counted as map outputs.
        diskSegments.AddItem(new Merger.Segment<K, V>(job, fs, file, codec, keepInputs, (file.ToString().EndsWith(Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix) ? null : mergedMapOutputsCounter), file.GetRawDataLength()));
    }
    Log.Info("Merging " + onDisk.Length + " files, " + onDiskBytes + " bytes from disk");
    diskSegments.Sort(new _IComparer_786());
    // build final list of segments from merged backed by disk + in-mem
    IList<Merger.Segment<K, V>> finalSegments = new AList<Merger.Segment<K, V>>();
    long inMemBytes = CreateInMemorySegments(inMemoryMapOutputs, finalSegments, 0);
    Log.Info("Merging " + finalSegments.Count + " segments, " + inMemBytes + " bytes from memory into reduce");
    if (0 != onDiskBytes)
    {
        int numInMemSegments = memDiskSegments.Count;
        diskSegments.AddRange(0, memDiskSegments);
        memDiskSegments.Clear();
        // Pass mergePhase only if there is a going to be intermediate
        // merges. See comment where mergePhaseFinished is being set
        Progress thisPhase = (mergePhaseFinished) ? null : mergePhase;
        RawKeyValueIterator diskMerge = Merger.Merge(job, fs, keyClass, valueClass, codec, diskSegments, ioSortFactor, numInMemSegments, tmpDir, comparator, reporter, false, spilledRecordsCounter, null, thisPhase);
        diskSegments.Clear();
        if (0 == finalSegments.Count)
        {
            // Nothing left in memory; the disk merge IS the final merge.
            return (diskMerge);
        }
        // Wrap the disk merge as a single synthetic segment alongside the in-memory ones.
        finalSegments.AddItem(new Merger.Segment<K, V>(new MergeManagerImpl.RawKVIteratorReader(this, diskMerge, onDiskBytes), true, rawBytes));
    }
    return (Merger.Merge(job, fs, keyClass, valueClass, finalSegments, finalSegments.Count, tmpDir, comparator, reporter, spilledRecordsCounter, null, null));
}
/// <summary>
/// Merges a batch of in-memory map outputs into a single sorted on-disk spill file,
/// running the combiner over the merged stream when one is configured, and registers
/// the resulting file with the merge manager.
/// </summary>
/// <param name="inputs">In-memory map outputs to merge; a null or empty list is a no-op.</param>
/// <exception cref="System.IO.IOException"/>
public override void Merge(IList<InMemoryMapOutput<K, V>> inputs)
{
    if (inputs == null || inputs.Count == 0)
    {
        return;
    }
    //name this output file same as the name of the first file that is
    //there in the current list of inmem files (this is guaranteed to
    //be absent on the disk currently. So we don't overwrite a prev.
    //created spill). Also we need to create the output file now since
    //it is not guaranteed that this file will be present after merge
    //is called (we delete empty files as soon as we see them
    //in the merge method)
    //figure out the mapId
    TaskAttemptID mapId = inputs[0].GetMapId();
    TaskID mapTaskId = mapId.GetTaskID();
    IList<Merger.Segment<K, V>> inMemorySegments = new AList<Merger.Segment<K, V>>();
    long mergeOutputSize = this._enclosing.CreateInMemorySegments(inputs, inMemorySegments, 0);
    int noInMemorySegments = inMemorySegments.Count;
    Path outputPath = this._enclosing.mapOutputFile.GetInputFileForWrite(mapTaskId, mergeOutputSize).Suffix(Org.Apache.Hadoop.Mapred.Task.MergedOutputPrefix);
    // Output stream is transparently encrypted when intermediate-data encryption is on.
    FSDataOutputStream @out = CryptoUtils.WrapIfNecessary(this._enclosing.jobConf, this._enclosing.rfs.Create(outputPath));
    IFile.Writer<K, V> writer = new IFile.Writer<K, V>(this._enclosing.jobConf, @out, (Type)this._enclosing.jobConf.GetMapOutputKeyClass(), (Type)this._enclosing.jobConf.GetMapOutputValueClass(), this._enclosing.codec, null, true);
    RawKeyValueIterator rIter = null;
    MergeManagerImpl.CompressAwarePath compressAwarePath;
    try
    {
        MergeManagerImpl.Log.Info("Initiating in-memory merge with " + noInMemorySegments + " segments...");
        rIter = Merger.Merge(this._enclosing.jobConf, this._enclosing.rfs, (Type)this._enclosing.jobConf.GetMapOutputKeyClass(), (Type)this._enclosing.jobConf.GetMapOutputValueClass(), inMemorySegments, inMemorySegments.Count, new Path(this._enclosing.reduceId.ToString()), (RawComparator<K>)this._enclosing.jobConf.GetOutputKeyComparator(), this._enclosing.reporter, this._enclosing.spilledRecordsCounter, null, null);
        // Without a combiner the merged stream is written straight to disk;
        // with one, it is combined first and the combiner writes via the writer.
        if (null == this._enclosing.combinerClass)
        {
            Merger.WriteFile(rIter, writer, this._enclosing.reporter, this._enclosing.jobConf);
        }
        else
        {
            this._enclosing.combineCollector.SetWriter(writer);
            this._enclosing.CombineAndSpill(rIter, this._enclosing.reduceCombineInputCounter);
        }
        writer.Close();
        compressAwarePath = new MergeManagerImpl.CompressAwarePath(outputPath, writer.GetRawLength(), writer.GetCompressedLength());
        MergeManagerImpl.Log.Info(this._enclosing.reduceId + " Merge of the " + noInMemorySegments + " files in-memory complete." + " Local file is " + outputPath + " of size " + this._enclosing.localFS.GetFileStatus(outputPath).GetLen());
    }
    catch (IOException e)
    {
        //make sure that we delete the ondisk file that we created
        //earlier when we invoked cloneFileAttributes
        this._enclosing.localFS.Delete(outputPath, true);
        throw;
    }
    // Note the output of the merge
    this._enclosing.CloseOnDiskFile(compressAwarePath);
}
/// <summary>
/// Verifies a fido-u2f attestation statement per
/// https://www.w3.org/TR/webauthn/#fido-u2f-attestation and returns the attestation
/// type together with the certificate trust path.
/// </summary>
/// <returns>The attestation type (AttCA) and the x5c certificate chain.</returns>
/// <exception cref="VerificationException">Any step of the verification procedure fails.</exception>
public override (AttestationType, X509Certificate2[]) Verify()
{
    // verify that aaguid is 16 empty bytes (note: required by fido2 conformance testing, could not find this in spec?)
    if (0 != AuthData.AttestedCredentialData.AaGuid.CompareTo(Guid.Empty))
    {
        throw new VerificationException("Aaguid was not empty parsing fido-u2f atttestation statement");
    }

    // https://www.w3.org/TR/webauthn/#fido-u2f-attestation
    // 1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it to extract the contained fields.
    // (handled in base class)
    if (null == X5c || CBORType.Array != X5c.Type || X5c.Count != 1)
    {
        throw new VerificationException("Malformed x5c in fido - u2f attestation");
    }

    // 2a. Check that x5c has exactly one element and let attCert be that element.
    if (null == X5c.Values || 0 == X5c.Values.Count ||
        CBORType.ByteString != X5c.Values.First().Type ||
        0 == X5c.Values.First().GetByteString().Length)
    {
        throw new VerificationException("Malformed x5c in fido-u2f attestation");
    }

    var attCert = new X509Certificate2(X5c.Values.First().GetByteString());

    // TODO : Check why this variable isn't used. Remove it or use it.
    var u2ftransports = U2FTransportsFromAttnCert(attCert.Extensions);

    // 2b. If certificate public key is not an Elliptic Curve (EC) public key over the P-256 curve, terminate this algorithm and return an appropriate error
    var pubKey = attCert.GetECDsaPublicKey();
    var keyParams = pubKey.ExportParameters(false);

    // Windows identifies named curves by FriendlyName, other platforms by OID value.
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        if (!keyParams.Curve.Oid.FriendlyName.Equals(ECCurve.NamedCurves.nistP256.Oid.FriendlyName))
        {
            throw new VerificationException("Attestation certificate public key is not an Elliptic Curve (EC) public key over the P-256 curve");
        }
    }
    else
    {
        if (!keyParams.Curve.Oid.Value.Equals(ECCurve.NamedCurves.nistP256.Oid.Value))
        {
            throw new VerificationException("Attestation certificate public key is not an Elliptic Curve (EC) public key over the P-256 curve");
        }
    }

    // 3. Extract the claimed rpIdHash from authenticatorData, and the claimed credentialId and credentialPublicKey from authenticatorData
    // see rpIdHash, credentialId, and credentialPublicKey members of base class AuthenticatorData (AuthData)

    // 4. Convert the COSE_KEY formatted credentialPublicKey (see Section 7 of [RFC8152]) to CTAP1/U2F public Key format (Raw ANSI X9.62 public key format)
    // 4a. Let x be the value corresponding to the "-2" key (representing x coordinate) in credentialPublicKey, and confirm its size to be of 32 bytes. If size differs or "-2" key is not found, terminate this algorithm and return an appropriate error
    var x = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.X)].GetByteString();

    // 4b. Let y be the value corresponding to the "-3" key (representing y coordinate) in credentialPublicKey, and confirm its size to be of 32 bytes. If size differs or "-3" key is not found, terminate this algorithm and return an appropriate error
    var y = CredentialPublicKey[CBORObject.FromObject(COSE.KeyTypeParameter.Y)].GetByteString();

    // BUG FIX: steps 4a/4b require the coordinate sizes to be confirmed as 32 bytes;
    // the checks were documented above but never actually performed.
    if (32 != x.Length || 32 != y.Length)
    {
        throw new VerificationException("Invalid fido-u2f attestation credential key coordinate size");
    }

    // 4c.Let publicKeyU2F be the concatenation 0x04 || x || y
    var publicKeyU2F = new byte[1] { 0x4 }.Concat(x).Concat(y).ToArray();

    // 5. Let verificationData be the concatenation of (0x00 || rpIdHash || clientDataHash || credentialId || publicKeyU2F)
    var verificationData = new byte[1] { 0x00 };
    verificationData = verificationData
        .Concat(AuthData.RpIdHash)
        .Concat(clientDataHash)
        .Concat(AuthData.AttestedCredentialData.CredentialID)
        .Concat(publicKeyU2F)
        .ToArray();

    // 6. Verify the sig using verificationData and certificate public key
    if (null == Sig || CBORType.ByteString != Sig.Type || 0 == Sig.GetByteString().Length)
    {
        throw new VerificationException("Invalid fido-u2f attestation signature");
    }

    // The attestation signature is ASN.1/DER encoded; convert to the raw form VerifyData expects.
    byte[] ecsig;
    try
    {
        ecsig = CryptoUtils.SigFromEcDsaSig(Sig.GetByteString(), pubKey.KeySize);
    }
    catch (Exception ex)
    {
        throw new VerificationException("Failed to decode fido-u2f attestation signature from ASN.1 encoded form", ex);
    }

    var coseAlg = CredentialPublicKey[CBORObject.FromObject(COSE.KeyCommonParameter.Alg)].AsInt32();
    var hashAlg = CryptoUtils.HashAlgFromCOSEAlg(coseAlg);

    if (true != pubKey.VerifyData(verificationData, ecsig, hashAlg))
    {
        throw new VerificationException("Invalid fido-u2f attestation signature");
    }

    // 7. Optionally, inspect x5c and consult externally provided knowledge to determine whether attStmt conveys a Basic or AttCA attestation
    var trustPath = X5c.Values
        .Select(xx => new X509Certificate2(xx.GetByteString()))
        .ToArray();

    return (AttestationType.AttCa, trustPath);
}
/// <summary>
/// Incrementally downloads vault data from Keeper and merges it into local storage.
/// Removals are applied first, then added/changed entities are re-encrypted under the
/// local client key and persisted; finally the affected records/folders are rebuilt.
/// </summary>
/// <param name="vault">Vault connected to Keeper.</param>
/// <returns>A task that completes once the sync-down pass has been applied.</returns>
internal static async Task RunSyncDown(this VaultOnline vault)
{
    var storage = vault.Storage;
    var context = vault.Auth.AuthContext;
    var clientKey = vault.ClientKey;
    var command = new SyncDownCommand
    {
        revision = storage.Revision,
        include = new[] { "sfheaders", "sfrecords", "sfusers", "teams", "folders" },
        deviceName = vault.Auth.Endpoint.DeviceName,
        // NOTE(review): deviceId is populated from DeviceName — confirm this is intended.
        deviceId = vault.Auth.Endpoint.DeviceName
    };
    var rs = await vault.Auth.ExecuteAuthCommand<SyncDownCommand, SyncDownResponse>(command);
    Debug.WriteLine("Sync Down: Enter");
    var isFullSync = rs.fullSync;
    if (isFullSync)
    {
        // Full sync: server resends everything, so drop all local state first.
        storage.Clear();
    }

    var result = new RebuildTask(isFullSync);

    // ---------- removals ----------
    if (rs.removedRecords != null)
    {
        // Unlink removed records from the personal scope.
        storage.RecordKeys.DeleteLinks(
            rs.removedRecords
                .Select(recordUid => UidLink.Create(recordUid, storage.PersonalScopeUid)));

        var folderRecords = new List<IUidLink>();
        foreach (var recordUid in rs.removedRecords)
        {
            result.AddRecord(recordUid);
            var links = storage.FolderRecords.GetLinksForObject(recordUid).ToArray();
            foreach (var link in links)
            {
                var folderUid = link.FolderUid;
                // NOTE(review): with '&&' this branch is reachable only when PersonalScopeUid is
                // itself null/empty; '||' may have been intended — confirm before changing.
                if (string.IsNullOrEmpty(folderUid) && folderUid == storage.PersonalScopeUid)
                {
                    folderRecords.Add(link);
                }
                else
                {
                    var folder = storage.Folders.GetEntity(folderUid);
                    if (folder?.FolderType == "user_folder")
                    {
                        folderRecords.Add(link);
                    }
                }
            }
        }

        storage.FolderRecords.DeleteLinks(folderRecords);
    }

    if (rs.removedTeams != null)
    {
        foreach (var teamUid in rs.removedTeams)
        {
            // Every shared folder the team granted access to must be rebuilt.
            var sfLinks = storage.SharedFolderKeys.GetLinksForObject(teamUid).ToArray();
            foreach (var sfLink in sfLinks)
            {
                result.AddSharedFolder(sfLink.SharedFolderUid);
            }

            storage.SharedFolderKeys.DeleteLinks(sfLinks);
        }

        storage.Teams.DeleteUids(rs.removedTeams);
    }

    if (rs.removedSharedFolders != null)
    {
        foreach (var sharedFolderUid in rs.removedSharedFolders)
        {
            result.AddSharedFolder(sharedFolderUid);
            var links = storage.RecordKeys.GetLinksForObject(sharedFolderUid).ToArray();
            foreach (var recLink in links)
            {
                result.AddRecord(recLink.RecordUid);
            }
        }

        storage.SharedFolderKeys.DeleteLinks(
            rs.removedSharedFolders
                .Select(x => UidLink.Create(x, storage.PersonalScopeUid)));
    }

    if (rs.userFoldersRemoved != null)
    {
        storage.FolderRecords.DeleteLinksForSubjects(rs.userFoldersRemoved.Select(x => x.folderUid));
        storage.Folders.DeleteUids(rs.userFoldersRemoved.Select(x => x.folderUid));
    }

    if (rs.sharedFolderFolderRemoved != null)
    {
        var folderUids = rs.sharedFolderFolderRemoved
            .Select(x => x.FolderUid ?? x.SharedFolderUid)
            .ToArray();
        storage.FolderRecords.DeleteLinksForSubjects(folderUids);
        storage.Folders.DeleteUids(folderUids);
    }

    if (rs.userFolderSharedFoldersRemoved != null)
    {
        storage.FolderRecords.DeleteLinksForSubjects(rs.userFolderSharedFoldersRemoved
            .Select(x => x.SharedFolderUid));
        storage.Folders.DeleteUids(rs.userFolderSharedFoldersRemoved
            .Select(x => x.SharedFolderUid));
    }

    if (rs.userFoldersRemovedRecords != null)
    {
        var links = rs.userFoldersRemovedRecords
            .Select(x => UidLink.Create(x.folderUid ?? storage.PersonalScopeUid, x.RecordUid))
            .ToArray();
        storage.FolderRecords.DeleteLinks(links);
    }

    if (rs.sharedFolderFolderRecordsRemoved != null)
    {
        var links = rs.sharedFolderFolderRecordsRemoved
            .Select(x => UidLink.Create(x.folderUid ?? x.sharedFolderUid, x.recordUid))
            .ToArray();
        storage.FolderRecords.DeleteLinks(links);
    }

    if (rs.sharedFolders != null)
    {
        // Fully-synced shared folders: wipe all dependent links before they are re-added below.
        var fullSyncSharedFolders = rs.sharedFolders
            .Where(x => x.fullSync == true)
            .Select(x => x.SharedFolderUid)
            .ToArray();
        storage.RecordKeys.DeleteLinksForObjects(fullSyncSharedFolders);
        storage.SharedFolderKeys.DeleteLinksForSubjects(fullSyncSharedFolders);
        storage.SharedFolderPermissions.DeleteLinksForSubjects(fullSyncSharedFolders);

        // Records removed from incrementally-synced shared folders.
        var affectedLinks = rs.sharedFolders
            .Where(x => !x.fullSync.HasValue || !x.fullSync.Value)
            .Where(x => x.recordsRemoved != null)
            .SelectMany(x => x.recordsRemoved, (x, recordUid) => UidLink.Create(recordUid, x.SharedFolderUid))
            .Cast<IUidLink>()
            .ToArray();
        if (affectedLinks.Any())
        {
            storage.RecordKeys.DeleteLinks(affectedLinks);
            foreach (var x in affectedLinks)
            {
                result.AddRecord(x.SubjectUid);
            }
        }

        // Teams removed from incrementally-synced shared folders.
        var affectedTeams = rs.sharedFolders
            .Where(x => !x.fullSync.HasValue || !x.fullSync.Value)
            .Where(x => x.teamsRemoved != null)
            .SelectMany(x => x.teamsRemoved, (x, teamUid) => UidLink.Create(x.SharedFolderUid, teamUid))
            .Cast<IUidLink>()
            .ToArray();
        if (affectedTeams.Any())
        {
            storage.SharedFolderKeys.DeleteLinks(affectedTeams);
        }

        // Users removed from incrementally-synced shared folders.
        var affectedUsers = rs.sharedFolders
            .Where(x => !x.fullSync.HasValue || !x.fullSync.Value)
            .Where(x => x.usersRemoved != null)
            .SelectMany(x => x.usersRemoved, (x, username) => UidLink.Create(x.SharedFolderUid, username))
            .Cast<IUidLink>()
            .ToArray();
        // FIX: the guard previously tested affectedLinks (record links) instead of affectedUsers,
        // so user-permission removals were skipped whenever no team/record removals accompanied them.
        if (affectedTeams.Any() || affectedUsers.Any())
        {
            storage.SharedFolderPermissions.DeleteLinks(affectedTeams.Concat(affectedUsers));
        }
    }

    // ---------- additions / updates ----------
    if (rs.nonSharedData != null)
    {
        // Re-encrypt non-shared data under the local client key before storing.
        storage.NonSharedData.PutEntities(rs.nonSharedData
            .Where(x => !string.IsNullOrEmpty(x.data))
            .Select(x =>
            {
                try
                {
                    var data = x.data.Base64UrlDecode();
                    data = CryptoUtils.DecryptAesV1(data, context.DataKey);
                    data = CryptoUtils.EncryptAesV1(data, clientKey);
                    x.data = data.Base64UrlEncode();
                    return x;
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.Message);
                    return null;
                }
            })
            .Where(x => x != null));
    }

    // Ownership flags arrive in record metadata; remember them so record entities pick them up.
    var recordOwnership = new Dictionary<string, bool>();
    if (rs.recordMetaData != null)
    {
        foreach (var rmd in rs.recordMetaData)
        {
            recordOwnership[rmd.RecordUid] = rmd.Owner;
        }
    }

    if (rs.records != null)
    {
        result.AddRecords(rs.records.Select(x => x.RecordUid));
        storage.Records.PutEntities(rs.records
            .Select(x =>
            {
                x.AdjustUdata();
                if (!recordOwnership.ContainsKey(x.RecordUid))
                {
                    return x;
                }

                x.Owner = recordOwnership[x.RecordUid];
                recordOwnership.Remove(x.RecordUid);
                return x;
            }));
    }

    if (rs.recordMetaData != null)
    {
        result.AddRecords(rs.recordMetaData.Select(x => x.RecordUid));

        // Leftover ownership entries belong to records already in storage: update them in place.
        var toUpdate = rs.recordMetaData
            .Where(x => recordOwnership.ContainsKey(x.RecordUid))
            .Select(x =>
            {
                var sr = storage.Records.GetEntity(x.RecordUid);
                if (sr == null)
                {
                    return null;
                }

                if (sr.Owner == x.Owner)
                {
                    return null;
                }

                sr.Owner = x.Owner;
                return sr;
            })
            .Where(x => x != null)
            .ToArray();
        if (toUpdate.Any())
        {
            storage.Records.PutEntities(toUpdate);
        }

        // Re-encrypt record keys under the local client key.
        var rmds = rs.recordMetaData
            .Select(rmd =>
            {
                try
                {
                    byte[] key;
                    switch (rmd.RecordKeyType)
                    {
                        case 0:
                            key = context.DataKey;
                            break;
                        case 1:
                            key = CryptoUtils.DecryptAesV1(rmd.RecordKey.Base64UrlDecode(), context.DataKey);
                            break;
                        case 2:
                            key = CryptoUtils.DecryptRsa(rmd.RecordKey.Base64UrlDecode(), context.PrivateKey);
                            break;
                        default:
                            throw new Exception(
                                $"Record metadata UID {rmd.RecordUid}: unsupported key type {rmd.RecordKeyType}");
                    }

                    if (key != null)
                    {
                        rmd.RecordKey = CryptoUtils.EncryptAesV1(key, context.ClientKey).Base64UrlEncode();
                        rmd.RecordKeyType = (int)KeyType.DataKey;
                        rmd.SharedFolderUid = storage.PersonalScopeUid;
                        return rmd;
                    }
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.Message);
                }

                return null;
            })
            // FIX: drop entries whose key could not be decrypted instead of passing nulls to
            // PutLinks (consistent with the teams handling below).
            .Where(x => x != null)
            .ToArray();
        storage.RecordKeys.PutLinks(rmds);
    }

    if (rs.teams != null)
    {
        var removedSharedFolderLinks = rs.teams
            .Where(x => x.removedSharedFolders != null)
            .SelectMany(x => x.removedSharedFolders,
                (team, sharedFolderUid) => UidLink.Create(sharedFolderUid, team.TeamUid))
            .Cast<IUidLink>()
            .ToArray();
        if (removedSharedFolderLinks.Any())
        {
            result.AddSharedFolders(removedSharedFolderLinks.Select(x => x.SubjectUid));
            storage.SharedFolderKeys.DeleteLinks(removedSharedFolderLinks);
        }

        // Re-encrypt team keys under the local client key.
        var teams = rs.teams
            .Select(x =>
            {
                try
                {
                    byte[] teamKey;
                    switch (x.KeyType)
                    {
                        case (int)KeyType.DataKey:
                            teamKey = CryptoUtils.DecryptAesV1(x.TeamKey.Base64UrlDecode(), context.DataKey);
                            break;
                        case (int)KeyType.PrivateKey:
                            teamKey = CryptoUtils.DecryptRsa(x.TeamKey.Base64UrlDecode(), context.PrivateKey);
                            break;
                        default:
                            throw new Exception($"Team UID {x.TeamUid}: unsupported key type {x.KeyType}");
                    }

                    x.TeamKey = CryptoUtils.EncryptAesV1(teamKey, clientKey).Base64UrlEncode();
                    x.KeyType = (int)KeyType.DataKey;
                    return x;
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.Message);
                    return null;
                }
            })
            .Where(x => x != null)
            .ToArray();
        storage.Teams.PutEntities(teams);

        var sharedFolderKeys = rs.teams
            .Where(x => x.sharedFolderKeys != null)
            .SelectMany(x => x.sharedFolderKeys, (team, sharedFolderKey) =>
            {
                sharedFolderKey.TeamUid = team.TeamUid;
                sharedFolderKey.KeyType = sharedFolderKey.KeyType == 2
                    ? (int)KeyType.TeamPrivateKey
                    : (int)KeyType.TeamKey;
                return sharedFolderKey;
            })
            .ToArray();
        storage.SharedFolderKeys.PutLinks(sharedFolderKeys);
    }

    if (rs.sharedFolders != null)
    {
        result.AddSharedFolders(rs.sharedFolders.Select(x => x.SharedFolderUid));

        // shared folders
        storage.SharedFolders.PutEntities(rs.sharedFolders);

        // shared folder keys, re-encrypted under the local client key
        var sharedFolderKeys = rs.sharedFolders
            .Where(x => !string.IsNullOrEmpty(x.SharedFolderKey))
            .Select(x =>
            {
                try
                {
                    var sharedFolderKey = x.SharedFolderKey.Base64UrlDecode();
                    switch (x.KeyType)
                    {
                        case 1:
                            sharedFolderKey = CryptoUtils.DecryptAesV1(sharedFolderKey, context.DataKey);
                            break;
                        case 2:
                            sharedFolderKey = CryptoUtils.DecryptRsa(sharedFolderKey, context.PrivateKey);
                            break;
                        default:
                            throw new Exception(
                                $"Shared Folder UID {x.SharedFolderUid}: unsupported key type {x.KeyType}");
                    }

                    return new SyncDownSharedFolderKey
                    {
                        SharedFolderUid = x.SharedFolderUid,
                        TeamUid = storage.PersonalScopeUid,
                        SharedFolderKey = CryptoUtils.EncryptAesV1(sharedFolderKey, clientKey)
                            .Base64UrlEncode(),
                        KeyType = (int)KeyType.DataKey
                    };
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.Message);
                    return null;
                }
            })
            // FIX: drop nulls from failed decryptions instead of forwarding them to PutLinks.
            .Where(x => x != null)
            .ToArray();
        if (sharedFolderKeys.Any())
        {
            storage.SharedFolderKeys.PutLinks(sharedFolderKeys);
        }

        result.AddRecords(rs.sharedFolders
            .Where(x => x.records != null)
            .SelectMany(x => x.records, (sf, r) => r.RecordUid));

        // Records
        var sharedFolderRecords = rs.sharedFolders
            .Where(x => x.records != null)
            .SelectMany(x => x.records, (sf, sfr) => new SyncDownRecordMetaData
            {
                SharedFolderUid = sf.SharedFolderUid,
                RecordUid = sfr.RecordUid,
                RecordKey = sfr.RecordKey,
                RecordKeyType = (int)KeyType.SharedFolderKey,
                CanEdit = sfr.CanEdit,
                CanShare = sfr.CanShare
            })
            .ToArray();
        if (sharedFolderRecords.Any())
        {
            storage.RecordKeys.PutLinks(sharedFolderRecords);
        }

        // Teams
        var teams = rs.sharedFolders
            .Where(x => x.teams != null)
            .SelectMany(x => x.teams, (sf, sft) =>
            {
                sft.SharedFolderUid = sf.SharedFolderUid;
                return sft;
            })
            .Cast<ISharedFolderPermission>()
            .ToArray();

        // Users
        var users = rs.sharedFolders
            .Where(x => x.users != null)
            .SelectMany(x => x.users, (sf, sfu) =>
            {
                sfu.SharedFolderUid = sf.SharedFolderUid;
                return sfu;
            })
            .Cast<ISharedFolderPermission>()
            .ToArray();
        if (teams.Any() || users.Any())
        {
            storage.SharedFolderPermissions.PutLinks(teams.Concat(users));
        }
    }

    if (rs.userFolders != null)
    {
        var userFolders = rs.userFolders
            .Select(uf =>
            {
                try
                {
                    var folderKey = uf.FolderKey.Base64UrlDecode();
                    switch (uf.keyType)
                    {
                        case (int)KeyType.DataKey:
                            folderKey = CryptoUtils.DecryptAesV1(folderKey, context.DataKey);
                            break;
                        case (int)KeyType.PrivateKey:
                            folderKey = CryptoUtils.DecryptRsa(folderKey, context.PrivateKey);
                            break;
                        default:
                            throw new Exception(
                                $"User Folder UID {uf.FolderUid}: unsupported key type {uf.keyType}");
                    }

                    uf.FolderKey = CryptoUtils.EncryptAesV1(folderKey, clientKey).Base64UrlEncode();
                    uf.keyType = (int)KeyType.DataKey;
                    return uf;
                }
                catch (Exception e)
                {
                    Trace.TraceError(e.Message);
                    return null;
                }
            })
            // FIX: drop folders whose key failed to decrypt instead of storing null entities.
            .Where(x => x != null)
            .ToArray();
        storage.Folders.PutEntities(userFolders);
    }

    if (rs.sharedFolderFolders != null)
    {
        storage.Folders.PutEntities(rs.sharedFolderFolders);
    }

    if (rs.userFolderSharedFolders != null)
    {
        storage.Folders.PutEntities(rs.userFolderSharedFolders);
    }

    if (rs.userFolderRecords != null)
    {
        storage.FolderRecords.PutLinks(rs.userFolderRecords
            .Select(ufr =>
            {
                // Empty folder UID means the record lives in the personal scope root.
                ufr.FolderUid = string.IsNullOrEmpty(ufr.FolderUid) ? storage.PersonalScopeUid : ufr.FolderUid;
                return ufr;
            }));
    }

    if (rs.sharedFolderFolderRecords != null)
    {
        storage.FolderRecords.PutLinks(rs.sharedFolderFolderRecords);
    }

    storage.Revision = rs.revision;
    Debug.WriteLine("Sync Down: Leave");
    Debug.WriteLine("Rebuild Data: Enter");
    vault.RebuildData(result);
    Debug.WriteLine("Rebuild Data: Leave");
}
/// <summary>
/// Updates an existing vault record, or adds it as a new record when no record with
/// <c>record.Uid</c> exists in storage.
/// </summary>
/// <param name="vault">Vault connected to Keeper.</param>
/// <param name="record">Record to store.</param>
/// <param name="skipData">When true, the encrypted data section is left unchanged.</param>
/// <param name="skipExtra">When true (default), the extra/attachment section is left unchanged.</param>
/// <returns>The stored record as re-read from the vault after sync, or the input record if the lookup fails.</returns>
public static async Task <PasswordRecord> PutRecord(this VaultOnline vault, PasswordRecord record, bool skipData = false, bool skipExtra = true)
{
    IPasswordRecord existingRecord = null;
    if (!string.IsNullOrEmpty(record.Uid))
    {
        existingRecord = vault.Storage.Records.GetEntity(record.Uid);
    }
    // No stored record with this UID: treat as a brand-new record.
    if (existingRecord == null)
    {
        return(await vault.AddRecordToFolder(record));
    }

    var updateRecord = new RecordUpdateRecord { RecordUid = existingRecord.RecordUid };
    var rmd = vault.ResolveRecordAccessPath(updateRecord, true);
    if (rmd != null)
    {
        // Record key is missing or RSA-wrapped: re-wrap it with the account data key.
        if (rmd.RecordKeyType == (int)KeyType.NoKey || rmd.RecordKeyType == (int)KeyType.PrivateKey)
        {
            updateRecord.RecordKey = CryptoUtils.EncryptAesV1(record.RecordKey, vault.Auth.AuthContext.DataKey)
                                     .Base64UrlEncode();
        }
    }

    updateRecord.Revision = existingRecord.Revision;

    if (!skipData)
    {
        var dataSerializer = new DataContractJsonSerializer(typeof(RecordData), JsonUtils.JsonSettings);
        // Decrypt the currently stored data first so unknown fields can be carried over;
        // decryption failure is logged and the update proceeds with existingData == null.
        RecordData existingData = null;
        try
        {
            var unencryptedData = CryptoUtils.DecryptAesV1(existingRecord.Data.Base64UrlDecode(), record.RecordKey);
            using (var ms = new MemoryStream(unencryptedData))
            {
                existingData = (RecordData)dataSerializer.ReadObject(ms);
            }
        }
        catch (Exception e)
        {
            Trace.TraceError("Decrypt Record: UID: {0}, {1}: \"{2}\"", existingRecord.RecordUid, e.GetType().Name, e.Message);
        }
        var data = record.ExtractRecordData(existingData);
        using (var ms = new MemoryStream())
        {
            dataSerializer.WriteObject(ms, data);
            updateRecord.Data = CryptoUtils.EncryptAesV1(ms.ToArray(), record.RecordKey).Base64UrlEncode();
        }
    }

    if (!skipExtra)
    {
        var extraSerializer = new DataContractJsonSerializer(typeof(RecordExtra), JsonUtils.JsonSettings);
        // Same merge strategy as the data section, applied to the extra section.
        RecordExtra existingExtra = null;
        try
        {
            var unencryptedExtra = CryptoUtils.DecryptAesV1(existingRecord.Extra.Base64UrlDecode(), record.RecordKey);
            using (var ms = new MemoryStream(unencryptedExtra))
            {
                existingExtra = (RecordExtra)extraSerializer.ReadObject(ms);
            }
        }
        catch (Exception e)
        {
            Trace.TraceError("Decrypt Record: UID: {0}, {1}: \"{2}\"", existingRecord.RecordUid, e.GetType().Name, e.Message);
        }
        var extra = record.ExtractRecordExtra(existingExtra);
        using (var ms = new MemoryStream())
        {
            extraSerializer.WriteObject(ms, extra);
            updateRecord.Extra = CryptoUtils.EncryptAesV1(ms.ToArray(), record.RecordKey).Base64UrlEncode();
        }

        // Collect attachment (and thumbnail) file IDs into the record's udata section.
        var udata = new RecordUpdateUData();
        var ids = new HashSet <string>();
        if (record.Attachments != null)
        {
            foreach (var atta in record.Attachments)
            {
                ids.Add(atta.Id);
                if (atta.Thumbnails != null)
                {
                    foreach (var thumb in atta.Thumbnails)
                    {
                        ids.Add(thumb.Id);
                    }
                }
            }
        }
        udata.FileIds = ids.ToArray();
        updateRecord.Udata = udata;
    }

    var command = new RecordUpdateCommand
    {
        deviceId = vault.Auth.Endpoint.DeviceName,
        UpdateRecords = new[] { updateRecord }
    };
    await vault.Auth.ExecuteAuthCommand <RecordUpdateCommand, RecordUpdateResponse>(command);
    // Immediate sync-down so the returned record reflects the server state.
    await vault.ScheduleSyncDown(TimeSpan.FromSeconds(0));
    return(vault.TryGetRecord(record.Uid, out var r) ? r : record);
}
public static string ComputeHash(string buf) { return(CryptoUtils.SHA1Hash(buf, Encoding.Default)); }
/// <summary>
/// Creates a new folder (user folder, shared folder, or shared-folder subfolder) under
/// the given parent and syncs the vault.
/// </summary>
/// <typeparam name="T">Shared folder option type; non-null options force a shared folder.</typeparam>
/// <param name="vault">Vault connected to Keeper.</param>
/// <param name="folderName">Display name of the new folder.</param>
/// <param name="parentFolderUid">UID of the parent folder; null/empty means the root.</param>
/// <param name="sharedFolderOptions">When supplied, the folder is created as a shared folder.</param>
/// <returns>The created folder node, or null if it cannot be found after sync.</returns>
public static async Task <FolderNode> AddFolder <T>(this VaultOnline vault, string folderName, string parentFolderUid = null, T sharedFolderOptions = null)
    where T : class, ISharedFolderUserOptions, ISharedFolderRecordOptions
{
    var parent = vault.GetFolder(parentFolderUid);
    FolderType folderType;
    if (sharedFolderOptions != null)
    {
        // Shared folders may only be created directly under a user folder.
        if (parent.FolderType != FolderType.UserFolder)
        {
            throw new VaultException($"Shared folder cannot be created");
        }
        folderType = FolderType.SharedFolder;
    }
    else
    {
        // Inside a shared folder (or its subfolder) the child is a shared-folder folder.
        folderType = parent.FolderType == FolderType.UserFolder ? FolderType.UserFolder : FolderType.SharedFolderFolder;
    }

    // Key used to wrap the new folder key: account data key by default,
    // the owning shared folder's key for shared-folder subfolders.
    var encryptionKey = vault.Auth.AuthContext.DataKey;
    SharedFolder sharedFolder = null;
    if (folderType == FolderType.SharedFolderFolder)
    {
        var sharedFolderUid = parent.FolderType == FolderType.SharedFolder ? parent.FolderUid : parent.SharedFolderUid;
        sharedFolder = vault.GetSharedFolder(sharedFolderUid);
        encryptionKey = sharedFolder.SharedFolderKey;
    }

    var data = new FolderData
    {
        name = folderName ?? "",
    };
    var dataBytes = JsonUtils.DumpJson(data);
    var folderKey = CryptoUtils.GenerateEncryptionKey();
    var request = new FolderAddCommand
    {
        FolderUid = CryptoUtils.GenerateUid(),
        FolderType = folderType.GetFolderTypeText(),
        Key = CryptoUtils.EncryptAesV1(folderKey, encryptionKey).Base64UrlEncode(),
        Data = CryptoUtils.EncryptAesV1(dataBytes, folderKey).Base64UrlEncode(),
        // Root and shared-folder parents are not sent as ParentUid.
        ParentUid = string.IsNullOrEmpty(parent.FolderUid) || parent.FolderType == FolderType.SharedFolder ? null : parent.FolderUid,
        SharedFolderUid = sharedFolder?.Uid,
    };
    if (sharedFolderOptions != null)
    {
        // Shared folders additionally carry an encrypted name and permission defaults.
        request.Name = CryptoUtils.EncryptAesV1(Encoding.UTF8.GetBytes(folderName), folderKey).Base64UrlEncode();
        request.ManageUsers = sharedFolderOptions.ManageUsers ?? false;
        request.ManageRecords = sharedFolderOptions.ManageRecords ?? false;
        request.CanEdit = sharedFolderOptions.CanEdit ?? false;
        request.CanShare = sharedFolderOptions.CanShare ?? false;
    }
    _ = await vault.Auth.ExecuteAuthCommand <FolderAddCommand, AddFolderResponse>(request);
    // Immediate sync-down so the new folder can be returned from local state.
    await vault.ScheduleSyncDown(TimeSpan.FromSeconds(0));
    return(vault.TryGetFolder(request.FolderUid, out var f) ? f : null);
}
/// <summary>
/// Verifies an Android SafetyNet attestation statement: checks the CBOR fields,
/// validates the embedded JWS against its x5c certificate chain, and enforces the
/// nonce, timestamp, issuer host name, and ctsProfileMatch requirements.
/// </summary>
/// <exception cref="Fido2VerificationException">Thrown when any SafetyNet check fails.</exception>
public override void Verify()
{
    // Verify that attStmt is valid CBOR conforming to the syntax defined above and perform
    // CBOR decoding on it to extract the contained fields
    if ((CBORType.TextString != attStmt["ver"].Type) || (0 == attStmt["ver"].AsString().Length))
    {
        throw new Fido2VerificationException("Invalid version in SafetyNet data");
    }

    // Verify that response is a valid SafetyNet response of version ver
    if ((CBORType.ByteString != attStmt["response"].Type) || (0 == attStmt["response"].GetByteString().Length))
    {
        throw new Fido2VerificationException("Invalid response in SafetyNet data");
    }

    var response = attStmt["response"].GetByteString();
    var signedAttestationStatement = Encoding.UTF8.GetString(response);
    var jwtToken = new JwtSecurityToken(signedAttestationStatement);

    // FIX: the previous code dereferenced (Header["x5c"] as JArray) before any null check,
    // so a missing or malformed "x5c" header crashed with KeyNotFoundException /
    // NullReferenceException instead of the intended verification error.
    if (!jwtToken.Header.TryGetValue("x5c", out var x5cValue) ||
        !(x5cValue is JArray x5cArray) ||
        0 == x5cArray.Count)
    {
        throw new Fido2VerificationException("SafetyNet attestation missing x5c");
    }

    X509SecurityKey[] keys = x5cArray
                             .Values <string>()
                             .Select(x => new X509SecurityKey(
                                        new X509Certificate2(Convert.FromBase64String(x))))
                             .ToArray();

    var validationParameters = new TokenValidationParameters
    {
        ValidateIssuer = false,
        ValidateAudience = false,
        ValidateLifetime = false,
        ValidateIssuerSigningKey = true,
        IssuerSigningKeys = keys
    };

    var tokenHandler = new JwtSecurityTokenHandler();
    tokenHandler.ValidateToken(
        signedAttestationStatement,
        validationParameters,
        out var validatedToken);

    if (false == (validatedToken.SigningKey is X509SecurityKey))
    {
        throw new Fido2VerificationException("Safetynet signing key invalid");
    }

    // Pull nonce, ctsProfileMatch, and timestampMs out of the JWT claims.
    var nonce = "";
    var payload = false;
    foreach (var claim in jwtToken.Claims)
    {
        if (("nonce" == claim.Type) && ("http://www.w3.org/2001/XMLSchema#string" == claim.ValueType) && (0 != claim.Value.Length))
        {
            nonce = claim.Value;
        }
        if (("ctsProfileMatch" == claim.Type) && ("http://www.w3.org/2001/XMLSchema#boolean" == claim.ValueType))
        {
            payload = bool.Parse(claim.Value);
        }
        if (("timestampMs" == claim.Type) && ("http://www.w3.org/2001/XMLSchema#integer64" == claim.ValueType))
        {
            // NOTE(review): double.Parse uses the current culture; InvariantCulture would be
            // safer for this machine-generated value — confirm before changing.
            var dt = DateTimeHelper.UnixEpoch.AddMilliseconds(double.Parse(claim.Value));
            if ((DateTime.UtcNow < dt) || (DateTime.UtcNow.AddMinutes(-1) > dt))
            {
                throw new Fido2VerificationException("Android SafetyNet timestampMs must be between one minute ago and now");
            }
        }
    }

    // Verify that the nonce in the response is identical to the SHA-256 hash of the
    // concatenation of authenticatorData and clientDataHash
    if ("" == nonce)
    {
        throw new Fido2VerificationException("Nonce value not found in Android SafetyNet attestation");
    }
    if (!CryptoUtils.GetHasher(HashAlgorithmName.SHA256).ComputeHash(Data).SequenceEqual(Convert.FromBase64String(nonce)))
    {
        throw new Fido2VerificationException("Android SafetyNet hash value mismatch");
    }

    // Verify that the attestation certificate is issued to the hostname "attest.android.com"
    if (false == ("attest.android.com").Equals((validatedToken.SigningKey as X509SecurityKey).Certificate.GetNameInfo(X509NameType.DnsName, false)))
    {
        throw new Fido2VerificationException("Safetynet DnsName is not attest.android.com");
    }

    // Verify that the ctsProfileMatch attribute in the payload of response is true
    if (true != payload)
    {
        throw new Fido2VerificationException("Android SafetyNet ctsProfileMatch must be true");
    }
}
/// <summary>
/// Renames a folder and/or updates shared-folder options, then syncs the vault.
/// </summary>
/// <param name="vault">Vault connected to Keeper.</param>
/// <param name="folderUid">UID of the folder to update.</param>
/// <param name="folderName">New folder name; null/empty keeps the current name.</param>
/// <param name="sharedFolderOptions">Optional shared-folder permission defaults.</param>
/// <returns>The updated folder node, or null if it cannot be found after sync.</returns>
/// <exception cref="VaultException">
/// Thrown when neither a name nor options are supplied, a sibling with the same name
/// exists, or the caller lacks permission on the shared folder.
/// </exception>
public static async Task <FolderNode> FolderUpdate(this VaultOnline vault, string folderUid, string folderName, SharedFolderOptions sharedFolderOptions = null)
{
    if (string.IsNullOrEmpty(folderName) && sharedFolderOptions == null)
    {
        throw new VaultException("Folder name cannot be empty");
    }

    var folder = vault.GetFolder(folderUid);
    if (string.IsNullOrEmpty(folderName))
    {
        folderName = folder.Name;
    }

    var parent = vault.RootFolder;
    if (!string.IsNullOrEmpty(folder.ParentUid))
    {
        vault.TryGetFolder(folder.ParentUid, out parent);
    }

    // Reject the rename when a sibling folder already carries the target name.
    var nameExists = parent.Subfolders
        .Select(x => vault.TryGetFolder(x, out var v) ? v : null)
        .Any(x => x != null && x.FolderUid != folderUid &&
             string.Compare(x.Name, folderName, StringComparison.InvariantCultureIgnoreCase) == 0);
    if (nameExists)
    {
        throw new VaultException($"Folder with name {folderName} already exists in {parent.Name}");
    }

    var request = new FolderUpdateCommand
    {
        FolderUid = folder.FolderUid,
        FolderType = folder.FolderType.GetFolderTypeText(),
        ParentUid = string.IsNullOrEmpty(folder.ParentUid) ? null : folder.ParentUid,
        SharedFolderUid = string.IsNullOrEmpty(folder.SharedFolderUid) ? null : folder.SharedFolderUid,
    };

    // Carry over any existing folder data so only the name changes.
    var existingRecord = vault.Storage.Folders.GetEntity(folderUid);
    var data = string.IsNullOrEmpty(existingRecord?.Data)
        ? new FolderData()
        : JsonUtils.ParseJson <FolderData>(existingRecord.Data.Base64UrlDecode());
    data.name = folderName;
    var dataBytes = JsonUtils.DumpJson(data);

    // Shared-folder subfolders are encrypted with the owning shared folder's key.
    var encryptionKey = vault.Auth.AuthContext.DataKey;
    if (folder.FolderType == FolderType.SharedFolderFolder)
    {
        encryptionKey = vault.GetSharedFolder(folder.SharedFolderUid).SharedFolderKey;
    }
    request.Data = CryptoUtils.EncryptAesV1(dataBytes, encryptionKey).Base64UrlEncode();

    if (folder.FolderType != FolderType.UserFolder)
    {
        // NOTE(review): inside this branch FolderType is never UserFolder, so the ternary
        // always yields SharedFolderUid — confirm whether SharedFolder nodes should use
        // FolderUid here instead.
        var sharedFolderUid = folder.FolderType == FolderType.UserFolder ? folder.FolderUid : folder.SharedFolderUid;
        var perm = vault.ResolveSharedFolderAccessPath(vault.Auth.Username, sharedFolderUid, true, true);
        if (perm != null)
        {
            if (perm.UserType == UserType.Team)
            {
                request.TeamUid = perm.UserId;
            }
        }
        else
        {
            throw new VaultException($"You don't have permissions to modify shared folder ({sharedFolderUid})");
        }
    }

    if (sharedFolderOptions != null && folder.FolderType == FolderType.SharedFolder)
    {
        // FIX: the condition was inverted ("!TryGetSharedFolder"), which dereferenced a null
        // sharedFolder when the lookup failed and skipped setting the encrypted name when it
        // succeeded.
        if (vault.TryGetSharedFolder(folder.FolderUid, out var sharedFolder))
        {
            request.Name = CryptoUtils.EncryptAesV1(Encoding.UTF8.GetBytes(folderName), sharedFolder.SharedFolderKey).Base64UrlEncode();
        }
        request.ManageUsers = sharedFolderOptions.ManageUsers;
        request.ManageRecords = sharedFolderOptions.ManageRecords;
        request.CanEdit = sharedFolderOptions.CanEdit;
        request.CanShare = sharedFolderOptions.CanShare;
    }

    await vault.Auth.ExecuteAuthCommand(request);
    // Immediate sync-down so the updated folder can be returned from local state.
    await vault.ScheduleSyncDown(TimeSpan.FromSeconds(0));
    return(vault.TryGetFolder(request.FolderUid, out var f) ? f : null);
}
/// <summary>
/// Reads an optional string column from <paramref name="row"/>.
/// Returns <see cref="string.Empty"/> when the column is absent or the cell is DBNull.
/// </summary>
private static string GetStringOrEmpty(DataRow row, string column)
{
    if (!row.Table.Columns.Contains(column) || row.IsNull(column))
    {
        return string.Empty;
    }
    return row[column].ToString();
}

/// <summary>
/// Reads an optional integer column from <paramref name="row"/>.
/// Returns 0 when the column is absent or the cell is DBNull.
/// </summary>
private static int GetIntOrZero(DataRow row, string column)
{
    if (!row.Table.Columns.Contains(column) || row.IsNull(column))
    {
        return 0;
    }
    return int.Parse(row[column].ToString());
}

/// <summary>
/// Materializes a <c>Servicios</c> entity from a data row, tolerating missing columns
/// and DBNull cells (numeric fields default to 0, text fields to empty).
/// </summary>
/// <param name="row">Source row; column presence is checked per field.</param>
/// <returns>The populated service entity, with <c>Key</c> set to the MD5-encrypted service id.</returns>
public static Servicios getNewTiposServicios(DataRow row)
{
    // The per-field if/else ladders of the original were collapsed into two helpers;
    // field-by-field behavior (missing column => default value) is unchanged.
    var servicio = new Servicios
    {
        SE_ServicioId = GetIntOrZero(row, "SE_ServicioId"),
        SE_Activo = GetStringOrEmpty(row, "SE_Activo"),
        SE_CodBanco = GetStringOrEmpty(row, "SE_CodBanco"),
        SE_CodTransaccion = GetStringOrEmpty(row, "SE_CodTransaccion"),
        SE_CtaAdministrativa = GetStringOrEmpty(row, "SE_CtaAdministrativa"),
        SE_Grupo = GetStringOrEmpty(row, "SE_Grupo"),
        SE_HoraFin = GetIntOrZero(row, "SE_HoraFin"),
        SE_HoraInicio = GetIntOrZero(row, "SE_HoraInicio"),
        SE_Nombre = GetStringOrEmpty(row, "SE_Nombre"),
        SE_OlbId = GetStringOrEmpty(row, "SE_OlbId"),
        SE_RIF = GetStringOrEmpty(row, "SE_RIF"),
        SE_Nemotecnico = GetStringOrEmpty(row, "SE_Nemotecnico"),
    };
    // Key is derived from the numeric service id, exactly as before.
    servicio.Key = CryptoUtils.EncryptMD5(servicio.SE_ServicioId.ToString());
    return servicio;
}
public static async Task <PasswordRecord> AddRecordToFolder(this VaultOnline vault, PasswordRecord record, string folderUid = null) { FolderNode node = null; if (!string.IsNullOrEmpty(folderUid)) { vault.TryGetFolder(folderUid, out node); } record.Uid = CryptoUtils.GenerateUid(); record.RecordKey = CryptoUtils.GenerateEncryptionKey(); var recordAdd = new RecordAddCommand { RecordUid = record.Uid, RecordKey = CryptoUtils.EncryptAesV1(record.RecordKey, vault.Auth.AuthContext.DataKey).Base64UrlEncode(), RecordType = "password" }; if (node == null) { recordAdd.FolderType = "user_folder"; } else { switch (node.FolderType) { case FolderType.UserFolder: recordAdd.FolderType = "user_folder"; recordAdd.FolderUid = node.FolderUid; break; case FolderType.SharedFolder: case FolderType.SharedFolderFolder: recordAdd.FolderUid = node.FolderUid; recordAdd.FolderType = node.FolderType == FolderType.SharedFolder ? "shared_folder" : "shared_folder_folder"; if (vault.TryGetSharedFolder(node.SharedFolderUid, out var sf)) { recordAdd.FolderKey = CryptoUtils.EncryptAesV1(record.RecordKey, sf.SharedFolderKey) .Base64UrlEncode(); } if (string.IsNullOrEmpty(recordAdd.FolderKey)) { throw new Exception($"Cannot resolve shared folder for folder UID: {folderUid}"); } break; } } var dataSerializer = new DataContractJsonSerializer(typeof(RecordData), JsonUtils.JsonSettings); var data = record.ExtractRecordData(); using (var ms = new MemoryStream()) { dataSerializer.WriteObject(ms, data); recordAdd.Data = CryptoUtils.EncryptAesV1(ms.ToArray(), record.RecordKey).Base64UrlEncode(); } await vault.Auth.ExecuteAuthCommand(recordAdd); await vault.ScheduleSyncDown(TimeSpan.FromSeconds(0)); return(vault.TryGetRecord(record.Uid, out var r) ? r : record); }
static void Main(string[] args) { Console.Write(CryptoUtils.Generate3DESKey()); }
public DCUser2 GetCache(string sessionid) { return(CacheMgr.Get <DCUser2>(CacheMgr.Get <string>(CryptoUtils.DecryptTripleDES(sessionid)))); }
/// <summary> /// Decrypt payload /// </summary> /// <param name="receiverPrivateKey"></param> /// <param name="senderPublicKey"></param> /// <returns></returns> public string DecryptPayload(string receiverPrivateKey, string senderPublicKey) { return(CryptoUtils.Decode(Payload.ToHexLower(), receiverPrivateKey, senderPublicKey)); }
/// <summary>
/// PSAttack build driver: shows the banner, downloads the module list and the latest
/// PS>Punch release, encrypts each module into the Punch modules directory, generates
/// the csproj, and builds PSPunch, reporting success/failure on the console.
/// </summary>
static void Main(string[] args)
{
    Console.SetWindowSize(120, 40);
    Console.ForegroundColor = ConsoleColor.Red;
    Console.WriteLine(Strings.psaWarning);
    Console.ForegroundColor = ConsoleColor.Gray;
    Console.WriteLine(Strings.psaStartMsg);
    Console.ReadLine();
    Console.Clear();
    Console.ForegroundColor = ConsoleColor.DarkYellow;
    // Pick a random ASCII-art logo for the banner.
    Random random = new Random();
    int psaLogoInt = random.Next(Strings.psaLogos.Count);
    Console.WriteLine(Strings.psaLogos[psaLogoInt]);
    Console.WriteLine("Version {0}\n", Strings.version);
    Console.ForegroundColor = ConsoleColor.White;
    // NOTE(review): the StreamReader is never disposed — consider a using statement.
    StreamReader sr = new StreamReader("modules.json");
    string modulesJson = sr.ReadToEnd();
    MemoryStream memReader = new MemoryStream(Encoding.UTF8.GetBytes(modulesJson));
    Console.WriteLine("[*] Getting modules from local json.");
    List <Module> modules = PSAUtils.GetModuleList(memReader);
    // NOTE(review): workingDir is assigned but never used below.
    string workingDir = PSAUtils.GetPSAttackDir();
    Console.WriteLine("[*] Looking for latest release of PS>Punch");
    Punch punch = PSAUtils.GetPSPunch(new Uri(Strings.punchURL));
    Console.WriteLine("[*] Got Punch Version: {0}", punch.tag_name);
    Console.WriteLine("[*] Downloading: {0}", punch.zipball_url);
    punch.DownloadZip();
    Console.WriteLine("[*] Unzipping to: {0}", Strings.punchUnzipDir);
    punch.unzipped_dir = PSAUtils.UnzipFile(Strings.punchZipPath);
    Console.WriteLine("[*] Clearing modules at: {0}", punch.modules_dir);
    punch.ClearModules();
    if (!(Directory.Exists(Strings.moduleSrcDir)))
    {
        Directory.CreateDirectory(Strings.moduleSrcDir);
    }
    // Download and encrypt each module; per-module failures are reported and skipped.
    foreach (Module module in modules)
    {
        string dest = Path.Combine(Strings.moduleSrcDir, (module.name + ".ps1"));
        string encOutfile = punch.modules_dir + CryptoUtils.EncryptString(punch, module.name) + ".ps1.enc";
        try
        {
            PSAUtils.DownloadFile(module.url, dest);
            Console.WriteLine("[*] Encrypting: {0}", dest);
            CryptoUtils.EncryptFile(punch, dest, encOutfile);
        }
        catch (Exception e)
        {
            ConsoleColor origColor = Console.ForegroundColor;
            Console.ForegroundColor = ConsoleColor.Red;
            Console.WriteLine("There was an error processing {0}. \nError message: \n\n{1}\n", module.name, e.Message);
            Console.ForegroundColor = origColor;
        }
    }
    Console.WriteLine("Generating PSPunch.csproj at {0}", punch.csproj_file);
    PSAUtils.BuildCsproj(modules, punch);
    Console.WriteLine("[*] Building PSPunch!");
    Console.ForegroundColor = ConsoleColor.Gray;
    // Exit code 0 = built; 999 = MSBuild not found; anything else = build failure.
    int exitCode = PSAUtils.BuildPunch(punch);
    if (exitCode == 0)
    {
        Console.ForegroundColor = ConsoleColor.Green;
        Console.WriteLine(Strings.psaEndSuccess, Strings.punchBuildDir);
        Console.ReadLine();
        Process.Start(Strings.punchBuildDir);
    }
    else if (exitCode == 999)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine(Strings.psaEndNoMSBuild, System.Runtime.InteropServices.RuntimeEnvironment.GetRuntimeDirectory());
        Console.ReadLine();
        Environment.Exit(exitCode);
    }
    else
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine(Strings.psaEndFailure);
        Console.ReadLine();
        Environment.Exit(exitCode);
    }
}
/// <summary>
/// Dispatches a received network package: logs unknown packages, unpacks McpeWrapper
/// batches (decrypt, zlib-inflate, split into inner packages, recurse), and forwards
/// everything else to the message handler. Packages are returned to their pool after use.
/// </summary>
/// <param name="message">Package to handle; null is ignored.</param>
/// <param name="playerSession">Session providing the per-player crypto context.</param>
internal void HandlePackage(Package message, PlayerNetworkSession playerSession)
{
    //SignalTick();

    try
    {
        if (message == null)
        {
            return;
        }

        if (typeof(UnknownPackage) == message.GetType())
        {
            UnknownPackage packet = (UnknownPackage)message;
            if (Log.IsDebugEnabled)
            {
                Log.Warn($"Received unknown package 0x{message.Id:X2}\n{Package.HexDump(packet.Message)}");
            }

            message.PutPool();
            return;
        }

        if (typeof(McpeWrapper) == message.GetType())
        {
            McpeWrapper batch = (McpeWrapper)message;
            var messages = new List <Package>();

            // Get bytes
            byte[] payload = batch.payload;
            // Batches may be encrypted per session; decrypt before inflating.
            if (playerSession.CryptoContext != null && playerSession.CryptoContext.UseEncryption)
            {
                payload = CryptoUtils.Decrypt(payload, playerSession.CryptoContext);
            }

            // Decompress bytes
            MemoryStream stream = new MemoryStream(payload);
            // 0x78 is the standard ZLib header byte; the second header byte is skipped
            // because DeflateStream expects a raw deflate stream.
            if (stream.ReadByte() != 0x78)
            {
                throw new InvalidDataException("Incorrect ZLib header. Expected 0x78 0x9C");
            }
            stream.ReadByte();
            using (var defStream2 = new DeflateStream(stream, CompressionMode.Decompress, false))
            {
                // Get actual package out of bytes
                using (MemoryStream destination = MiNetServer.MemoryStreamManager.GetStream())
                {
                    defStream2.CopyTo(destination);
                    destination.Position = 0;
                    NbtBinaryReader reader = new NbtBinaryReader(destination, true);

                    // The inflated buffer holds length-prefixed inner packages back-to-back.
                    while (destination.Position < destination.Length)
                    {
                        //int len = reader.ReadInt32();
                        int len = BatchUtils.ReadLength(destination);
                        byte[] internalBuffer = reader.ReadBytes(len);

                        //if (Log.IsDebugEnabled)
                        //	Log.Debug($"0x{internalBuffer[0]:x2}\n{Package.HexDump(internalBuffer)}");

                        messages.Add(PackageFactory.CreatePackage(internalBuffer[0], internalBuffer, "mcpe") ??
                                     new UnknownPackage(internalBuffer[0], internalBuffer));
                    }

                    if (destination.Length > destination.Position)
                    {
                        throw new Exception("Have more data");
                    }
                }
            }
            foreach (var msg in messages)
            {
                // Temp fix for performance, take 1.
                var interact = msg as McpeInteract;
                if (interact?.actionId == 4 && interact.targetRuntimeEntityId == 0)
                {
                    continue;
                }

                // Inner packages inherit the batch's reliability/ordering metadata.
                msg.DatagramSequenceNumber = batch.DatagramSequenceNumber;
                msg.Reliability = batch.Reliability;
                msg.ReliableMessageNumber = batch.ReliableMessageNumber;
                msg.OrderingChannel = batch.OrderingChannel;
                msg.OrderingIndex = batch.OrderingIndex;
                // Recurse: each inner package goes through the same dispatch.
                HandlePackage(msg, playerSession);
            }

            message.PutPool();
            return;
        }

        MiNetServer.TraceReceive(message);

        // Encrypted sessions are handled off-thread; plaintext is handled inline.
        if (CryptoContext != null && CryptoContext.UseEncryption)
        {
            MiNetServer.FastThreadPool.QueueUserWorkItem(delegate()
            {
                HandlePackage(MessageHandler, message as Package);
                message.PutPool();
            });
        }
        else
        {
            HandlePackage(MessageHandler, message);
            message.PutPool();
        }
    }
    catch (Exception e)
    {
        Log.Error("Package handling", e);
        throw;
    }
}
public DsaKeyValue() { var pair = CryptoUtils.DSAGenerateKeyPair(); _key = (DsaPublicKeyParameters)pair.Public; }
/// <summary>
/// Performs a multi-pass merge of the configured sorted segments down to at
/// most <paramref name="factor"/> streams, writing intermediate merge output
/// to local disk, and returns this object as an iterator over the final merge.
/// (Sharpen-converted Hadoop merge code; ordering of the bookkeeping below is
/// load-bearing — do not reorder.)
/// </summary>
/// <param name="keyClass">Key type for intermediate IFile writers.</param>
/// <param name="valueClass">Value type for intermediate IFile writers.</param>
/// <param name="factor">Maximum number of streams merged per pass.</param>
/// <param name="inMem">Count of in-memory segments at the head of the segment list.</param>
/// <param name="tmpDir">Directory for intermediate merge files.</param>
/// <param name="readsCounter">Counter charged for segment reads.</param>
/// <param name="writesCounter">Counter charged for intermediate writes.</param>
/// <param name="mergePhase">Optional externally-owned progress object; replaces the internal one when non-null.</param>
/// <exception cref="System.IO.IOException"/>
internal virtual RawKeyValueIterator Merge(Type keyClass, Type valueClass, int factor
	, int inMem, Path tmpDir, Counters.Counter readsCounter, Counters.Counter writesCounter
	, Progress mergePhase)
{
	Log.Info("Merging " + segments.Count + " sorted segments");
	/*
	 * If there are inMemory segments, then they come first in the segments
	 * list and then the sorted disk segments. Otherwise(if there are only
	 * disk segments), then they are sorted segments if there are more than
	 * factor segments in the segments list.
	 */
	int numSegments = segments.Count;
	int origFactor = factor;
	int passNo = 1;
	if (mergePhase != null)
	{
		mergeProgress = mergePhase;
	}
	// Total bytes that all merge passes will read; used to scale progress.
	long totalBytes = ComputeBytesInMerges(factor, inMem);
	if (totalBytes != 0)
	{
		progPerByte = 1.0f / (float)totalBytes;
	}
	do
	{
		//create the MergeStreams from the sorted map created in the constructor
		//and dump the final output to a file
		//get the factor for this pass of merge. We assume in-memory segments
		//are the first entries in the segment list and that the pass factor
		//doesn't apply to them
		factor = GetPassFactor(factor, passNo, numSegments - inMem);
		if (1 == passNo)
		{
			// First pass additionally absorbs all in-memory segments.
			factor += inMem;
		}
		IList<Merger.Segment<K, V>> segmentsToMerge = new AList<Merger.Segment<K, V>>();
		int segmentsConsidered = 0;
		int numSegmentsToConsider = factor;
		long startBytes = 0;
		// starting bytes of segments of this merge
		while (true)
		{
			//extract the smallest 'factor' number of segments
			//Call cleanup on the empty segments (no key/value data)
			IList<Merger.Segment<K, V>> mStream = GetSegmentDescriptors(numSegmentsToConsider
				);
			foreach (Merger.Segment<K, V> segment in mStream)
			{
				// Initialize the segment at the last possible moment;
				// this helps in ensuring we don't use buffers until we need them
				segment.Init(readsCounter);
				long startPos = segment.GetReader().bytesRead;
				bool hasNext = segment.NextRawKey();
				long endPos = segment.GetReader().bytesRead;
				if (hasNext)
				{
					startBytes += endPos - startPos;
					segmentsToMerge.AddItem(segment);
					segmentsConsidered++;
				}
				else
				{
					// Empty segment: release it and shrink the outstanding count.
					segment.Close();
					numSegments--;
				}
			}
			//we ignore this segment for the merge
			//if we have the desired number of segments
			//or looked at all available segments, we break
			if (segmentsConsidered == factor || segments.Count == 0)
			{
				break;
			}
			numSegmentsToConsider = factor - segmentsConsidered;
		}
		//feed the streams to the priority queue
		Initialize(segmentsToMerge.Count);
		Clear();
		foreach (Merger.Segment<K, V> segment_1 in segmentsToMerge)
		{
			Put(segment_1);
		}
		//if we have lesser number of segments remaining, then just return the
		//iterator, else do another single level merge
		if (numSegments <= factor)
		{
			if (!includeFinalMerge)
			{
				// for reduce task
				// Reset totalBytesProcessed and recalculate totalBytes from the
				// remaining segments to track the progress of the final merge.
				// Final merge is considered as the progress of the reducePhase,
				// the 3rd phase of reduce task.
				totalBytesProcessed = 0;
				totalBytes = 0;
				for (int i = 0; i < segmentsToMerge.Count; i++)
				{
					totalBytes += segmentsToMerge[i].GetRawDataLength();
				}
			}
			if (totalBytes != 0)
			{
				//being paranoid
				progPerByte = 1.0f / (float)totalBytes;
			}
			totalBytesProcessed += startBytes;
			if (totalBytes != 0)
			{
				mergeProgress.Set(totalBytesProcessed * progPerByte);
			}
			else
			{
				mergeProgress.Set(1.0f);
			}
			// Last pass and no segments left - we're done
			Log.Info("Down to the last merge-pass, with " + numSegments + " segments left of total size: "
				+ (totalBytes - totalBytesProcessed) + " bytes");
			return(this);
		}
		else
		{
			Log.Info("Merging " + segmentsToMerge.Count + " intermediate segments out of a total of "
				+ (segments.Count + segmentsToMerge.Count));
			long bytesProcessedInPrevMerges = totalBytesProcessed;
			totalBytesProcessed += startBytes;
			//we want to spread the creation of temp files on multiple disks if
			//available under the space constraints
			// Estimated output size includes the checksum overhead so the local
			// dir allocator can pick a disk with enough space.
			long approxOutputSize = 0;
			foreach (Merger.Segment<K, V> s in segmentsToMerge)
			{
				approxOutputSize += s.GetLength() + ChecksumFileSystem.GetApproxChkSumLength(s.GetLength
					());
			}
			Path tmpFilename = new Path(tmpDir, "intermediate").Suffix("." + passNo);
			Path outputFile = lDirAlloc.GetLocalPathForWrite(tmpFilename.ToString(), approxOutputSize
				, conf);
			FSDataOutputStream @out = fs.Create(outputFile);
			// Transparently encrypts the intermediate file when so configured.
			@out = CryptoUtils.WrapIfNecessary(conf, @out);
			IFile.Writer<K, V> writer = new IFile.Writer<K, V>(conf, @out, keyClass, valueClass
				, codec, writesCounter, true);
			WriteFile(this, writer, reporter, conf);
			writer.Close();
			//we finished one single level merge; now clean up the priority
			//queue
			this.Close();
			// Add the newly create segment to the list of segments to be merged
			Merger.Segment<K, V> tempSegment = new Merger.Segment<K, V>(conf, fs, outputFile,
				codec, false);
			// Insert new merged segment into the sorted list
			int pos = Sharpen.Collections.BinarySearch(segments, tempSegment, segmentComparator
				);
			if (pos < 0)
			{
				// binary search failed. So position to be inserted at is -pos-1
				pos = -pos - 1;
			}
			segments.Add(pos, tempSegment);
			numSegments = segments.Count;
			// Subtract the difference between expected size of new segment and
			// actual size of new segment(Expected size of new segment is
			// inputBytesOfThisMerge) from totalBytes. Expected size and actual
			// size will match(almost) if combiner is not called in merge.
			long inputBytesOfThisMerge = totalBytesProcessed - bytesProcessedInPrevMerges;
			totalBytes -= inputBytesOfThisMerge - tempSegment.GetRawDataLength();
			if (totalBytes != 0)
			{
				progPerByte = 1.0f / (float)totalBytes;
			}
			passNo++;
		}
		//we are worried about only the first pass merge factor. So reset the
		//factor to what it originally was
		factor = origFactor;
	}
	while (true);
}
/// <summary>
/// Drives a full governance key-generation cycle with a single validator:
/// ChangeValidators → validator checks → KeyGenCommit → KeyGenSendValue →
/// KeyGenConfirm → FinishCycle, asserting contract results and mirroring each
/// step into a local <c>TrustlessKeygen</c> so subsequent steps have valid inputs.
/// Fix over previous revision: NUnit's Assert.AreEqual takes (expected, actual);
/// several calls had the arguments inverted, which produced misleading
/// "Expected: ... But was: ..." messages on failure.
/// </summary>
public void Test_OneNodeCycle()
{
    var stateManager = _container?.Resolve<IStateManager>();
    var contractRegisterer = _container?.Resolve<IContractRegisterer>();
    var tx = new TransactionReceipt();
    var sender = new BigInteger(0).ToUInt160();
    var context = new InvocationContext(sender, stateManager!.LastApprovedSnapshot, tx);
    var contract = new GovernanceContract(context);
    // Fixed test key so the cycle is deterministic.
    var keyPair = new EcdsaKeyPair("0xD95D6DB65F3E2223703C5D8E205D98E3E6B470F067B0F94F6C6BF73D4301CE48"
                                   .HexToBytes().ToPrivateKey());
    byte[] pubKey = CryptoUtils.EncodeCompressed(keyPair.PublicKey);
    ECDSAPublicKey[] allKeys = { keyPair.PublicKey };
    // n = 1 validator, faulty threshold f = 0.
    var keygen = new TrustlessKeygen(keyPair, allKeys, 0, 0);
    var cycle = 0.ToUInt256();
    ValueMessage value;

    // call ChangeValidators method
    {
        byte[][] validators = { pubKey };
        var input = ContractEncoder.Encode(GovernanceInterface.MethodChangeValidators, cycle, validators);
        var call = contractRegisterer!.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok, contract.ChangeValidators(cycle, validators, frame));
    }

    // check correct validator
    {
        var input = ContractEncoder.Encode(GovernanceInterface.MethodIsNextValidator, pubKey);
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok, contract.IsNextValidator(pubKey, frame));
        // IsNextValidator encodes true as 1.
        Assert.AreEqual(1.ToUInt256().ToBytes(), frame.ReturnValue);
    }

    // check incorrect validator
    {
        byte[] incorrectPubKey = pubKey.Reverse().ToArray();
        var input = ContractEncoder.Encode(GovernanceInterface.MethodIsNextValidator, incorrectPubKey);
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok, contract.IsNextValidator(incorrectPubKey, frame));
        Assert.AreEqual(0.ToUInt256().ToBytes(), frame.ReturnValue);
    }

    // call commit
    {
        var commitMessage = keygen.StartKeygen();
        byte[] commitment = commitMessage.Commitment.ToBytes();
        byte[][] encryptedRows = commitMessage.EncryptedRows;
        var input = ContractEncoder.Encode(GovernanceInterface.MethodKeygenCommit, cycle, commitment,
                                           encryptedRows);
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok, contract.KeyGenCommit(cycle, commitment, encryptedRows, frame));
        // committing is idempotent: several calls is ok
        Assert.AreEqual(ExecutionStatus.Ok, contract.KeyGenCommit(cycle, commitment, encryptedRows, frame));
        // set keygen state
        value = keygen.HandleCommit(0, commitMessage);
    }

    // send value
    {
        var proposer = new BigInteger(0).ToUInt256();
        var input = ContractEncoder.Encode(GovernanceInterface.MethodKeygenSendValue, cycle, proposer,
                                           value.EncryptedValues);
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok,
                        contract.KeyGenSendValue(cycle, proposer, value.EncryptedValues, frame));
        // set keygen state
        Assert.IsTrue(keygen.HandleSendValue(0, value));
        Assert.IsTrue(keygen.Finished());
    }

    // confirm
    {
        ThresholdKeyring? keyring = keygen.TryGetKeys();
        Assert.IsNotNull(keyring);
        var input = ContractEncoder.Encode(GovernanceInterface.MethodKeygenConfirm, cycle,
                                           keyring!.Value.TpkePublicKey.ToBytes(),
                                           keyring!.Value.ThresholdSignaturePublicKeySet.Keys
                                               .Select(key => key.ToBytes()).ToArray());
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        Assert.AreEqual(ExecutionStatus.Ok,
                        contract.KeyGenConfirm(cycle, keyring!.Value.TpkePublicKey.ToBytes(),
                                               keyring!.Value.ThresholdSignaturePublicKeySet.Keys
                                                   .Select(key => key.ToBytes()).ToArray(), frame));
        // set keygen state
        Assert.IsTrue(keygen.HandleConfirm(keyring!.Value.TpkePublicKey,
                                           keyring!.Value.ThresholdSignaturePublicKeySet));
    }

    // check no validators in storage until the cycle is finished
    Assert.Throws<ConsensusStateNotPresentException>(
        () => context.Snapshot.Validators.GetValidatorsPublicKeys());

    // finish cycle
    {
        var input = ContractEncoder.Encode(GovernanceInterface.MethodFinishCycle, cycle);
        var call = contractRegisterer.DecodeContract(context, ContractRegisterer.GovernanceContract, input);
        Assert.IsNotNull(call);
        var frame = new SystemContractExecutionFrame(call!, context, input, 100_000_000);
        // should fail due to the invalid block
        Assert.AreEqual(ExecutionStatus.ExecutionHalted, contract.FinishCycle(cycle, frame));
        // set next cycle block number in frame:
        frame.InvocationContext.Receipt.Block = StakingContract.CycleDuration;
        Assert.AreEqual(ExecutionStatus.Ok, contract.FinishCycle(cycle, frame));
    }

    // check new validators in storage
    var newValidators = context.Snapshot.Validators.GetValidatorsPublicKeys().ToArray();
    Assert.AreEqual(1, newValidators.Length);
    Assert.AreEqual(keyPair.PublicKey, newValidators[0]);
}