Skip to content

Commit 94de19c

Browse files
perf: use CDP to find open DevTools pages. (#1150)
This should reduce the flakiness and issues with finding open DevTools tabs for the corresponding pages. Currently we do multiple loops over all the targets, so this should give a nice performance improvement when multiple pages are open.
1 parent a06d8e6 commit 94de19c

File tree

3 files changed

+16
-165
lines changed

3 files changed

+16
-165
lines changed

src/DevtoolsUtils.ts

Lines changed: 0 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -15,61 +15,6 @@ import type {
1515
Target as PuppeteerTarget,
1616
} from './third_party/index.js';
1717

18-
export function extractUrlLikeFromDevToolsTitle(
19-
title: string,
20-
): string | undefined {
21-
const match = title.match(new RegExp(`DevTools - (.*)`));
22-
return match?.[1] ?? undefined;
23-
}
24-
25-
export function urlsEqual(url1: string, url2: string): boolean {
26-
const normalizedUrl1 = normalizeUrl(url1);
27-
const normalizedUrl2 = normalizeUrl(url2);
28-
return normalizedUrl1 === normalizedUrl2;
29-
}
30-
31-
/**
32-
* For the sake of the MCP server, when we determine if two URLs are equal we
33-
* remove some parts:
34-
*
35-
* 1. We do not care about the protocol.
36-
* 2. We do not care about trailing slashes.
37-
* 3. We do not care about "www".
38-
* 4. We ignore the hash parts.
39-
*
40-
* For example, if the user types "record a trace on foo.com", we would want to
41-
* match a tab in the connected Chrome instance that is showing "www.foo.com/"
42-
*/
43-
function normalizeUrl(url: string): string {
44-
let result = url.trim();
45-
46-
// Remove protocols
47-
if (result.startsWith('https://')) {
48-
result = result.slice(8);
49-
} else if (result.startsWith('http://')) {
50-
result = result.slice(7);
51-
}
52-
53-
// Remove 'www.'. This ensures that we find the right URL regardless of if the user adds `www` or not.
54-
if (result.startsWith('www.')) {
55-
result = result.slice(4);
56-
}
57-
58-
// We use target URLs to locate DevTools but those often do
59-
not include hash.
60-
const hashIdx = result.lastIndexOf('#');
61-
if (hashIdx !== -1) {
62-
result = result.slice(0, hashIdx);
63-
}
64-
65-
// Remove trailing slash
66-
if (result.endsWith('/')) {
67-
result = result.slice(0, -1);
68-
}
69-
70-
return result;
71-
}
72-
7318
/**
7419
* A mock implementation of an issues manager that only implements the methods
7520
* that are actually used by the IssuesAggregator

src/McpContext.ts

Lines changed: 15 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,7 @@ import fs from 'node:fs/promises';
88
import path from 'node:path';
99

1010
import type {TargetUniverse} from './DevtoolsUtils.js';
11-
import {
12-
extractUrlLikeFromDevToolsTitle,
13-
UniverseManager,
14-
urlsEqual,
15-
} from './DevtoolsUtils.js';
11+
import {UniverseManager} from './DevtoolsUtils.js';
1612
import {McpPage} from './McpPage.js';
1713
import type {ListenerMap, UncaughtError} from './PageCollector.js';
1814
import {NetworkCollector, ConsoleCollector} from './PageCollector.js';
@@ -653,37 +649,21 @@ export class McpContext implements Context {
653649
async detectOpenDevToolsWindows() {
654650
this.logger('Detecting open DevTools windows');
655651
const {pages} = await this.#getAllPages();
656-
// Clear all devToolsPage references before re-detecting.
657-
for (const mcpPage of this.#mcpPages.values()) {
658-
mcpPage.devToolsPage = undefined;
659-
}
660-
for (const devToolsPage of pages) {
661-
if (devToolsPage.url().startsWith('devtools://')) {
662-
try {
663-
this.logger('Calling getTargetInfo for ' + devToolsPage.url());
664-
const data = await devToolsPage
665-
// @ts-expect-error no types for _client().
666-
._client()
667-
.send('Target.getTargetInfo');
668-
const devtoolsPageTitle = data.targetInfo.title;
669-
const urlLike = extractUrlLikeFromDevToolsTitle(devtoolsPageTitle);
670-
if (!urlLike) {
671-
continue;
672-
}
673-
// TODO: lookup without a loop.
674-
for (const page of this.#pages) {
675-
if (urlsEqual(page.url(), urlLike)) {
676-
const mcpPage = this.#mcpPages.get(page);
677-
if (mcpPage) {
678-
mcpPage.devToolsPage = devToolsPage;
679-
}
680-
}
681-
}
682-
} catch (error) {
683-
this.logger('Issue occurred while trying to find DevTools', error);
652+
653+
await Promise.all(
654+
pages.map(async page => {
655+
const mcpPage = this.#mcpPages.get(page);
656+
if (!mcpPage) {
657+
return;
684658
}
685-
}
686-
}
659+
660+
if (await page.hasDevTools()) {
661+
mcpPage.devToolsPage = await page.openDevTools();
662+
} else {
663+
mcpPage.devToolsPage = undefined;
664+
}
665+
}),
666+
);
687667
}
688668

689669
getExtensionServiceWorkers(): ExtensionServiceWorker[] {

tests/DevtoolsUtils.test.ts

Lines changed: 1 addition & 75 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,7 @@ import {afterEach, describe, it} from 'node:test';
99

1010
import sinon from 'sinon';
1111

12-
import {
13-
extractUrlLikeFromDevToolsTitle,
14-
urlsEqual,
15-
UniverseManager,
16-
} from '../src/DevtoolsUtils.js';
12+
import {UniverseManager} from '../src/DevtoolsUtils.js';
1713
import {DevTools} from '../src/third_party/index.js';
1814
import type {Browser, Target} from '../src/third_party/index.js';
1915

@@ -24,76 +20,6 @@ import {
2420
withBrowser,
2521
} from './utils.js';
2622

27-
describe('extractUrlFromDevToolsTitle', () => {
28-
it('deals with no trailing /', () => {
29-
assert.strictEqual(
30-
extractUrlLikeFromDevToolsTitle('DevTools - example.com'),
31-
'example.com',
32-
);
33-
});
34-
it('deals with a trailing /', () => {
35-
assert.strictEqual(
36-
extractUrlLikeFromDevToolsTitle('DevTools - example.com/'),
37-
'example.com/',
38-
);
39-
});
40-
it('deals with www', () => {
41-
assert.strictEqual(
42-
extractUrlLikeFromDevToolsTitle('DevTools - www.example.com/'),
43-
'www.example.com/',
44-
);
45-
});
46-
it('deals with complex url', () => {
47-
assert.strictEqual(
48-
extractUrlLikeFromDevToolsTitle(
49-
'DevTools - www.example.com/path.html?a=b#3',
50-
),
51-
'www.example.com/path.html?a=b#3',
52-
);
53-
});
54-
});
55-
56-
describe('urlsEqual', () => {
57-
it('ignores trailing slashes', () => {
58-
assert.strictEqual(
59-
urlsEqual('https://google.com/', 'https://google.com'),
60-
true,
61-
);
62-
});
63-
64-
it('ignores www', () => {
65-
assert.strictEqual(
66-
urlsEqual('https://google.com/', 'https://www.google.com'),
67-
true,
68-
);
69-
});
70-
71-
it('ignores protocols', () => {
72-
assert.strictEqual(
73-
urlsEqual('https://google.com/', 'http://www.google.com'),
74-
true,
75-
);
76-
});
77-
78-
it('does not ignore other subdomains', () => {
79-
assert.strictEqual(
80-
urlsEqual('https://google.com/', 'https://photos.google.com'),
81-
false,
82-
);
83-
});
84-
85-
it('ignores hash', () => {
86-
assert.strictEqual(
87-
urlsEqual('https://google.com/#', 'http://www.google.com'),
88-
true,
89-
);
90-
assert.strictEqual(
91-
urlsEqual('https://google.com/#21', 'http://www.google.com#12'),
92-
true,
93-
);
94-
});
95-
});
96-
9723
describe('UniverseManager', () => {
9824
afterEach(() => {
9925
sinon.restore();

0 commit comments

Comments
 (0)