Commit

fix: issues after merge

Brian Faust committed Nov 22, 2019
1 parent 59a8edd commit 8bea924
Showing 11 changed files with 247 additions and 344 deletions.
99 changes: 0 additions & 99 deletions __tests__/integration/core-api/handlers/businesses.test.ts

This file was deleted.

1 change: 1 addition & 0 deletions __tests__/integration/core-api/handlers/peers.test.ts
@@ -39,6 +39,7 @@ beforeAll(async () => {
peerMock.port = mock.port;
peerMock.version = mock.version;
peerMock.latency = mock.latency;
peerMock.state.height = mock.state.height;

return peerMock;
});
7 changes: 6 additions & 1 deletion __tests__/integration/core-api/handlers/wallets.test.ts
@@ -42,6 +42,8 @@ beforeAll(async () => {

address = Identities.Address.fromPublicKey(publicKey);
address2 = Identities.Address.fromPublicKey("02def27da9336e7fbf63131b8d7e5c9f45b296235db035f1f4242c507398f0f21d");

validIdentifiers.address = address;
});

afterAll(async () => await tearDown());
@@ -93,15 +95,18 @@ describe("API 2.0 - Wallets", () => {
const response = await api.request("GET", `wallets/${value}`);
expect(response).toBeSuccessfulResponse();
expect(response.data.data).toBeObject();

const wallet = response.data.data;

api.expectWallet(wallet);

expect(wallet[identifier]).toBe(value);
}
});

it("should fail to GET a wallet by the given invalid identifier", async () => {
for (const value of invalidIdentifiers) {
api.expectError(await api.request("GET", `wallets/${value}`), 422);
api.expectError(await api.request("GET", `wallets/${value}`), 400);
}
});

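The last hunk above also changes the expected status for an invalid wallet identifier from 422 to 400. A plausible reading (not confirmed by this diff) is that invalid identifiers are now rejected by request-level schema validation, which surfaces as 400 Bad Request before the handler runs, instead of being rejected inside the handler with 422. The snippet below is a hypothetical sketch of that idea only; the identifier schema is an assumption, not the actual core-api definition.

import Joi from "@hapi/joi";

// Assumed identifier schema: an address, a public key or a delegate username.
const walletIdentifier = Joi.alternatives().try(
    Joi.string().alphanum().length(34), // address
    Joi.string().hex().length(66), // public key
    Joi.string().pattern(/^[a-z0-9!@$&_.]{1,20}$/), // username
);

const { error } = walletIdentifier.validate("not-a-valid-identifier!!");
if (error) {
    // A failed schema check is reported by the API layer as HTTP 400 (Bad Request),
    // which is what the updated test now expects.
    console.log(400, error.message);
}
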
8 changes: 2 additions & 6 deletions packages/core-api/src/controllers/wallets.ts
@@ -168,14 +168,10 @@ export class WalletsController extends Controller {
}

private findWallet(id: string): Contracts.State.Wallet | Boom<null> {
let wallet: Contracts.State.Wallet | undefined;

try {
wallet = this.walletRepository.findByScope(Contracts.State.SearchScope.Wallets, id);
return this.walletRepository.findByScope(Contracts.State.SearchScope.Wallets, id);
} catch (error) {
return Boom.notFound("Wallet not found");
throw Boom.notFound("Wallet not found");
}

return wallet;
}
}
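
The wallets controller hunk swaps a returned Boom value for a thrown one and returns the repository lookup directly. Below is a minimal sketch (not the actual controller, and the caller shown is hypothetical) of what that pattern buys: when a helper throws Boom.notFound(), hapi converts the uncaught Boom into the matching 404 response, so call sites no longer need to check whether they received a wallet or an error object.

import Boom from "@hapi/boom";

interface Wallet {
    address: string;
    balance: string;
}

class WalletsControllerSketch {
    public constructor(private readonly walletRepository: Map<string, Wallet>) {}

    // Hypothetical action: hapi turns an uncaught Boom thrown from a route
    // handler into the corresponding HTTP error response (404 here).
    public async show(request: { params: { id: string } }): Promise<{ data: Wallet }> {
        return { data: this.findWallet(request.params.id) };
    }

    private findWallet(id: string): Wallet {
        const wallet: Wallet | undefined = this.walletRepository.get(id);

        if (!wallet) {
            throw Boom.notFound("Wallet not found");
        }

        return wallet;
    }
}
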
2 changes: 1 addition & 1 deletion packages/core-database/src/defaults.ts
@@ -8,7 +8,7 @@ export const defaults = {
password: process.env.CORE_DB_PASSWORD || "password",
entityPrefix: "public.",
synchronize: false,
logging: true,
logging: false,
},
estimateTotalCount: !process.env.CORE_API_NO_ESTIMATED_TOTAL_COUNT,
};
28 changes: 14 additions & 14 deletions packages/core-p2p/src/network-monitor.ts
@@ -330,7 +330,10 @@ export class NetworkMonitor implements Contracts.P2P.INetworkMonitor {
return { forked: true, blocksToRollback: Math.min(lastBlock.data.height - highestCommonHeight, 5000) };
}

public async downloadBlocksFromHeight(fromBlockHeight: number, maxParallelDownloads = 10): Promise<Interfaces.IBlockData[]> {
public async downloadBlocksFromHeight(
fromBlockHeight: number,
maxParallelDownloads = 10,
): Promise<Interfaces.IBlockData[]> {
const peersAll: Contracts.P2P.Peer[] = this.storage.getPeers();

if (peersAll.length === 0) {
@@ -343,7 +346,7 @@ export class NetworkMonitor implements Contracts.P2P.INetworkMonitor {
if (peersNotForked.length === 0) {
this.logger.error(
`Could not download blocks: We have ${peersAll.length} peer(s) but all ` +
`of them are on a different chain than us`
`of them are on a different chain than us`,
);
return [];
}
@@ -390,10 +393,7 @@ export class NetworkMonitor implements Contracts.P2P.INetworkMonitor {
// As a first peer to try, pick such a peer that different jobs use different peers.
// If that peer fails then pick randomly from the remaining peers that have not
// been first-attempt for any job.
const peersToTry = [
peersNotForked[i],
...Utils.shuffle(peersNotForked.slice(chunksToDownload))
];
const peersToTry = [peersNotForked[i], ...Utils.shuffle(peersNotForked.slice(chunksToDownload))];

for (peer of peersToTry) {
peerPrint = `${peer.ip}:${peer.port}`;
@@ -402,30 +402,28 @@

if (blocks.length === chunkSize || (isLastChunk && blocks.length > 0)) {
this.logger.debug(
`Downloaded blocks ${blocksRange} (${blocks.length}) ` +
`from ${peerPrint}`
`Downloaded blocks ${blocksRange} (${blocks.length}) ` + `from ${peerPrint}`,
);
downloadResults[i] = blocks;
return;
}
} catch (error) {
this.logger.info(
`Failed to download blocks ${blocksRange} from ${peerPrint}: ${error.message}`
`Failed to download blocks ${blocksRange} from ${peerPrint}: ${error.message}`,
);
}

if (someJobFailed) {
this.logger.info(
`Giving up on trying to download blocks ${blocksRange}: ` +
`another download job failed`
`Giving up on trying to download blocks ${blocksRange}: ` + `another download job failed`,
);
}
}

someJobFailed = true;
throw new Error(
`Could not download blocks ${blocksRange} from any of ${peersToTry.length} ` +
`peer(s). Last attempt returned ${blocks.length} block(s) from peer ${peerPrint}.`
`peer(s). Last attempt returned ${blocks.length} block(s) from peer ${peerPrint}.`,
);
});

@@ -459,8 +457,10 @@ export class NetworkMonitor implements Contracts.P2P.INetworkMonitor {
}
// Save any downloaded chunks that are higher than a failed chunk for later reuse.
for (i++; i < chunksToDownload; i++) {
if (downloadResults[i] !== undefined &&
Object.keys(this.downloadedChunksCache).length <= this.downloadedChunksCacheMax) {
if (
downloadResults[i] !== undefined &&
Object.keys(this.downloadedChunksCache).length <= this.downloadedChunksCacheMax
) {
this.downloadedChunksCache[fromBlockHeight + chunkSize * i] = downloadResults[i];
}
}
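The reflowed peersToTry expression in the hunks above is the whole peer-selection strategy for parallel block downloads: job i gets peersNotForked[i] as its dedicated first attempt, and only falls back to a shuffled copy of the peers that are nobody's first attempt (those past the first chunksToDownload entries). A standalone sketch of that selection, with local stand-ins for the real names and for Utils.shuffle:

interface PeerLike {
    ip: string;
    port: number;
}

// Stand-in for Utils.shuffle from @arkecosystem/core-kernel.
const shuffle = <T>(items: T[]): T[] =>
    items
        .map((value) => ({ value, sort: Math.random() }))
        .sort((a, b) => a.sort - b.sort)
        .map(({ value }) => value);

const peersForJob = (peersNotForked: PeerLike[], jobIndex: number, jobCount: number): PeerLike[] => [
    // Each parallel download job starts on a distinct peer...
    peersNotForked[jobIndex],
    // ...and retries against a random ordering of the peers beyond the first
    // jobCount entries, i.e. peers that are not anybody's first attempt.
    ...shuffle(peersNotForked.slice(jobCount)),
];
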
21 changes: 12 additions & 9 deletions packages/core-p2p/src/peer-communicator.ts
@@ -9,14 +9,17 @@ import { PeerPingTimeoutError, PeerStatusResponseError, PeerVerificationFailedEr
import { PeerConfig, PeerPingResponse } from "./interfaces";
import { PeerConnector } from "./peer-connector";
import { PeerVerifier } from "./peer-verifier";
import { createSchemas } from "./schemas";
import { replySchemas } from "./schemas";
import { isValidVersion, socketEmit, buildRateLimiter } from "./utils";
import { RateLimiter } from "./rate-limiter";
import { constants } from "./constants";

// todo: review the implementation
@Container.injectable()
export class PeerCommunicator {
@Container.inject(Container.Identifiers.Application)
private readonly app!: Contracts.Kernel.Application;

@Container.inject(Container.Identifiers.LogService)
private readonly logger!: Contracts.Kernel.Log.Logger;

@@ -26,9 +29,9 @@ export class PeerCommunicator {
@Container.inject(Container.Identifiers.PeerConnector)
private readonly connector!: PeerConnector;

private outgoingRateLimiter: RateLimiter;
private outgoingRateLimiter!: RateLimiter;

constructor(@Container.inject(Container.Identifiers.Application) private readonly app: Contracts.Kernel.Application) {
public init() {
this.outgoingRateLimiter = buildRateLimiter({
// White listing anybody here means we would not throttle ourselves when sending
// them requests, ie we could spam them.
@@ -38,8 +41,7 @@
.get<Providers.ServiceProviderRepository>(Container.Identifiers.ServiceProviderRepository)
.get("@arkecosystem/core-p2p")
.config()
.all()
.rateLimit
.all().rateLimit,
});
}

@@ -110,7 +112,8 @@
} else {
this.logger.warning(
`Disconnecting from ${peerHostPort}: ` +
`nethash mismatch: our=${ourNethash}, his=${hisNethash}.`);
`nethash mismatch: our=${ourNethash}, his=${hisNethash}.`,
);
this.emitter.dispatch("internal.p2p.disconnectPeer", { peer });
}
}
@@ -193,7 +196,7 @@
.get("@arkecosystem/core-p2p")
.config()
.get<number>("getBlocksTimeout"),
maxPayload
maxPayload,
);

if (!peerBlocks) {
Expand Down Expand Up @@ -225,7 +228,7 @@ export class PeerCommunicator {
}

private validateReply(peer: Contracts.P2P.Peer, reply: any, endpoint: string): boolean {
const schema = createSchemas(this.app).replySchemas[endpoint];
const schema = replySchemas[endpoint];
if (schema === undefined) {
this.logger.error(`Can't validate reply from "${endpoint}": none of the predefined schemas matches.`);
return false;
@@ -284,7 +287,7 @@
const msBeforeReCheck = 1000;
while (await this.outgoingRateLimiter.hasExceededRateLimit(peer.ip, event)) {
this.logger.debug(
`Throttling outgoing requests to ${peer.ip}/${event} to avoid triggering their rate limit`
`Throttling outgoing requests to ${peer.ip}/${event} to avoid triggering their rate limit`,
);
await delay(msBeforeReCheck);
}
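The peer-communicator hunks replace constructor injection of the application instance with property injection plus an init() method that builds the outgoing rate limiter. The sketch below illustrates only the ordering concern behind that move (the names and the call sequence are assumptions): with property injection the container assigns fields after the constructor has run, so setup code that reads this.app has to wait for a post-construction hook such as init().

class PeerCommunicatorSketch {
    // Filled in by @Container.inject(...) property injection in the real class;
    // assigned manually below to keep the sketch self-contained.
    public app!: { rateLimit: number };

    private outgoingRateLimiter!: { points: number };

    public init(): void {
        // Safe to read this.app here: injection has completed by the time the
        // service provider calls init(). A constructor body would run too early
        // and could see this.app as undefined.
        this.outgoingRateLimiter = { points: this.app.rateLimit };
    }
}

// Assumed call sequence: resolve, inject, then initialize.
const communicator = new PeerCommunicatorSketch();
communicator.app = { rateLimit: 100 }; // stands in for property injection
communicator.init();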