
offscreen (Offscreen Documents)

Official documentation

Lets an extension create and manage offscreen documents, so DOM APIs can be used from a hidden extension page (the MV3 service worker itself has no DOM access).

Permissions

"permissions": [
"offscreen"
],
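For context, a minimal manifest.json sketch that this permission would sit in; the extension name, version, and file names are placeholders, not taken from the samples below.

// manifest.json (sketch; name/version/files are placeholders)
{
  "manifest_version": 3,
  "name": "Offscreen demo",
  "version": "1.0",
  "permissions": ["offscreen"],
  "background": { "service_worker": "background.js" },
  "action": {}
}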

Methods

createDocument(): create an offscreen document

Creates a new offscreen document.

type Reason = (
  "TESTING" |          // for testing purposes
  "AUDIO_PLAYBACK" |   // playing audio
  "IFRAME_SCRIPTING" | // embedding and scripting an iframe
  "DOM_SCRAPING" |     // scraping the DOM to extract information
  "BLOBS" |            // interacting with Blob objects (including URL.createObjectURL())
  "DOM_PARSER" |       // parsing the DOM, e.g. new DOMParser()
  "USER_MEDIA" |       // interacting with media streams from user media, e.g. getUserMedia()
  "DISPLAY_MEDIA" |    // interacting with media streams from display media, e.g. getDisplayMedia()
  "WEB_RTC" |          // using WebRTC
  "CLIPBOARD" |        // interacting with the clipboard
  "LOCAL_STORAGE" |    // accessing localStorage
  "WORKERS" |          // spawning Web Workers
  "BATTERY_STATUS" |   // battery status, navigator.getBattery()
  "MATCH_MEDIA" |      // window.matchMedia()
  "GEOLOCATION"        // geolocation, navigator.geolocation
);
type CreateParameters = {
  // A developer-provided string that explains, in more detail, the need for the background context
  justification: string;
  // The reason(s) the extension is creating the offscreen document
  reasons: Reason[];
  // The (relative) URL to load in the document
  url: string;
}

chrome.offscreen.createDocument(parameters: CreateParameters, callback?: function)
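Only one offscreen document can exist per extension at a time, and createDocument() rejects if one is already open. A minimal guard sketch, assuming Chrome 116+ for chrome.runtime.getContexts(); the helper name setupOffscreenDocument and the DOM_PARSER reason are just placeholders:

// background.js (sketch)
async function setupOffscreenDocument(path) {
  // Bail out if an offscreen document for this URL is already open (Chrome 116+).
  const existing = await chrome.runtime.getContexts({
    contextTypes: ['OFFSCREEN_DOCUMENT'],
    documentUrls: [chrome.runtime.getURL(path)]
  });
  if (existing.length > 0) return;

  await chrome.offscreen.createDocument({
    url: path,
    reasons: ['DOM_PARSER'], // placeholder reason
    justification: 'Describe the real need here'
  });
}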

closeDocument(): close the offscreen document

chrome.offscreen.closeDocument(callback?: function)
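Since at most one offscreen document exists, closeDocument() needs no arguments beyond the optional callback. A hedged companion sketch that only closes when something is actually open (same Chrome 116+ getContexts() assumption as above):

// background.js (sketch)
async function closeOffscreenIfOpen() {
  const contexts = await chrome.runtime.getContexts({
    contextTypes: ['OFFSCREEN_DOCUMENT']
  });
  if (contexts.length > 0) {
    await chrome.offscreen.closeDocument();
  }
}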

Examples

Geolocation

// background.js
chrome.action.onClicked.addListener(async () => {
  await chrome.offscreen.createDocument({
    url: '/offscreen.html',
    reasons: [
      // Fall back to DOM_SCRAPING on Chrome versions that don't know the GEOLOCATION reason yet.
      chrome.offscreen.Reason.GEOLOCATION ||
        chrome.offscreen.Reason.DOM_SCRAPING
    ],
    justification: 'add justification for geolocation use here'
  });

  const geolocation = await chrome.runtime.sendMessage({
    type: 'get-geolocation',
    target: 'offscreen'
  });

  await chrome.offscreen.closeDocument();
  return geolocation;
});
<!-- offscreen.html -->
<!DOCTYPE html>
<script src="offscreen.js"></script>
// offscreen.js
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  if (message.target !== 'offscreen') {
    return false;
  }

  if (message.type !== 'get-geolocation') {
    console.warn(`Unexpected message type received: '${message.type}'.`);
    return;
  }

  getGeolocation().then((geolocation) => {
    console.log('getGeolocation', geolocation);
    sendResponse(geolocation);
  });
  // Returning true keeps the message channel open for the async sendResponse above.
  return true;
});

// GeolocationPosition can't be serialized by the messaging API directly
// (its properties live on the prototype), so copy them onto a plain object.
function clone(obj) {
  const copy = {};
  if (obj === null || !(obj instanceof Object)) {
    return obj;
  } else {
    for (const p in obj) {
      copy[p] = clone(obj[p]);
    }
  }
  return copy;
}

async function getGeolocation() {
  return new Promise((resolve, reject) => {
    navigator.geolocation.getCurrentPosition(
      (loc) => resolve(clone(loc)),
      (err) => reject(err)
    );
  });
}
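Note that if getGeolocation() rejects, the listener above never reaches closeDocument(). A hedged variant of the same flow that always cleans up; the function name getGeolocationViaOffscreen is illustrative:

// background.js (sketch): same flow, but the document is closed even on failure
async function getGeolocationViaOffscreen() {
  await chrome.offscreen.createDocument({
    url: '/offscreen.html',
    reasons: [chrome.offscreen.Reason.GEOLOCATION || chrome.offscreen.Reason.DOM_SCRAPING],
    justification: 'add justification for geolocation use here'
  });
  try {
    return await chrome.runtime.sendMessage({
      type: 'get-geolocation',
      target: 'offscreen'
    });
  } finally {
    await chrome.offscreen.closeDocument();
  }
}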

Copy and paste

// background.js
// Placeholder: 'value' is whatever text you want copied (e.g. from a message or user action).
const value = 'Text to copy';

chrome.action.onClicked.addListener(async () => {
  await chrome.offscreen.createDocument({
    url: 'offscreen.html',
    reasons: [chrome.offscreen.Reason.CLIPBOARD],
    justification: 'Write text to the clipboard.'
  });

  chrome.runtime.sendMessage({
    type: 'copy-data-to-clipboard',
    target: 'offscreen-doc',
    data: value
  });
});
<!-- offscreen.html -->
<!DOCTYPE html>
<textarea id="text"></textarea>
<script src="offscreen.js"></script>
// offscreen.js
chrome.runtime.onMessage.addListener(async (message) => {
  if (message.target !== 'offscreen-doc') {
    return;
  }

  switch (message.type) {
    case 'copy-data-to-clipboard':
      handleClipboardWrite(message.data);
      break;
    default:
      console.warn(`Unexpected message type received: '${message.type}'.`);
  }
});

const textEl = document.querySelector('#text');

async function handleClipboardWrite(data) {
  try {
    if (typeof data !== 'string') {
      throw new TypeError(
        `Value provided must be a 'string', got '${typeof data}'.`
      );
    }

    // navigator.clipboard requires a focused document, which an offscreen document
    // never is, so use the textarea + execCommand('copy') fallback instead.
    textEl.value = data;
    textEl.select();
    document.execCommand('copy');
  } finally {
    // Close the offscreen document once the write is done (or failed).
    window.close();
  }
}

Parsing the DOM

// background.js
chrome.action.onClicked.addListener(async () => {
  await chrome.offscreen.createDocument({
    url: 'offscreen.html',
    reasons: [chrome.offscreen.Reason.DOM_PARSER],
    justification: 'Parse the DOM.'
  });

  // The result comes back asynchronously as an 'add-exclamationmarks-result'
  // message (handled below), which also closes the offscreen document.
  chrome.runtime.sendMessage({
    type: 'add-exclamationmarks-to-headings',
    target: 'offscreen',
    data: '<html><head></head><body><h1>Hello World</h1></body></html>'
  });
});

chrome.runtime.onMessage.addListener(async (message) => {
  if (message.target !== 'background') {
    return;
  }

  switch (message.type) {
    case 'add-exclamationmarks-result':
      console.log('Received dom', message.data);
      chrome.offscreen.closeDocument();
      break;
    default:
      console.warn(`Unexpected message type received: '${message.type}'.`);
  }
});
<!-- offscreen.html -->
<!DOCTYPE html>
<script src="offscreen.js"></script>
// offscreen.js
chrome.runtime.onMessage.addListener(async (message) => {
  if (message.target !== 'offscreen') {
    return;
  }

  switch (message.type) {
    case 'add-exclamationmarks-to-headings': {
      const parser = new DOMParser();
      // Don't shadow the offscreen page's own `document`.
      const doc = parser.parseFromString(message.data, 'text/html');
      doc.querySelectorAll('h1').forEach(
        (heading) => (heading.textContent = heading.textContent + '!!!')
      );
      chrome.runtime.sendMessage({
        type: 'add-exclamationmarks-result',
        target: 'background',
        data: doc.documentElement.outerHTML
      });
      break;
    }
    default:
      console.warn(`Unexpected message type received: '${message.type}'.`);
  }
});
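The same round trip can also be done without the second 'add-exclamationmarks-result' message: the offscreen listener can answer via sendResponse() and the service worker can await chrome.runtime.sendMessage() directly, as the geolocation example does. A sketch of that variant (same message types, result handling simplified):

// offscreen.js (sketch)
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
  if (message.target !== 'offscreen' || message.type !== 'add-exclamationmarks-to-headings') {
    return false;
  }
  const doc = new DOMParser().parseFromString(message.data, 'text/html');
  doc.querySelectorAll('h1').forEach((h) => (h.textContent += '!!!'));
  // Parsing is synchronous, so sendResponse can be called before the listener returns.
  sendResponse(doc.documentElement.outerHTML);
});

// background.js (sketch, inside the onClicked handler)
const dom = await chrome.runtime.sendMessage({
  type: 'add-exclamationmarks-to-headings',
  target: 'offscreen',
  data: '<html><head></head><body><h1>Hello World</h1></body></html>'
});
console.log('Received dom', dom);
await chrome.offscreen.closeDocument();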

Tab recording

// background.js
chrome.action.onClicked.addListener(async (tab) => {
  const existingContexts = await chrome.runtime.getContexts({});
  let recording = false;

  const offscreenDocument = existingContexts.find(
    (c) => c.contextType === 'OFFSCREEN_DOCUMENT'
  );

  // If an offscreen document is not already open, create one.
  if (!offscreenDocument) {
    // Create an offscreen document.
    await chrome.offscreen.createDocument({
      url: 'offscreen.html',
      reasons: ['USER_MEDIA'],
      justification: 'Recording from chrome.tabCapture API'
    });
  } else {
    // The offscreen page sets '#recording' in its URL hash while recording.
    recording = offscreenDocument.documentUrl.endsWith('#recording');
  }

  if (recording) {
    chrome.runtime.sendMessage({
      type: 'stop-recording',
      target: 'offscreen'
    });
    chrome.action.setIcon({ path: 'icons/not-recording.png' });
    return;
  }

  // Get a MediaStream for the active tab.
  const streamId = await chrome.tabCapture.getMediaStreamId({
    targetTabId: tab.id
  });

  // Send the stream ID to the offscreen document to start recording.
  chrome.runtime.sendMessage({
    type: 'start-recording',
    target: 'offscreen',
    data: streamId
  });

  chrome.action.setIcon({ path: 'icons/recording.png' });
});
<!-- offscreen.html -->
<!DOCTYPE html>
<script src="offscreen.js"></script>
// offscreen.js
chrome.runtime.onMessage.addListener(async (message) => {
  if (message.target === 'offscreen') {
    switch (message.type) {
      case 'start-recording':
        startRecording(message.data);
        break;
      case 'stop-recording':
        stopRecording();
        break;
      default:
        throw new Error(`Unrecognized message: '${message.type}'.`);
    }
  }
});

let recorder;
let data = [];

async function startRecording(streamId) {
  if (recorder?.state === 'recording') {
    throw new Error('Called startRecording while recording is in progress.');
  }

  const media = await navigator.mediaDevices.getUserMedia({
    audio: {
      mandatory: {
        chromeMediaSource: 'tab',
        chromeMediaSourceId: streamId
      }
    },
    video: {
      mandatory: {
        chromeMediaSource: 'tab',
        chromeMediaSourceId: streamId
      }
    }
  });

  // Continue playing the captured audio to the user while recording.
  const output = new AudioContext();
  const source = output.createMediaStreamSource(media);
  source.connect(output.destination);

  recorder = new MediaRecorder(media, { mimeType: 'video/webm' });
  recorder.ondataavailable = (event) => data.push(event.data);
  recorder.onstop = () => {
    const blob = new Blob(data, { type: 'video/webm' });
    window.open(URL.createObjectURL(blob), '_blank');

    recorder = undefined;
    data = [];
  };
  recorder.start();

  // Record the state in the URL hash so the service worker can tell,
  // from the document's URL, whether a recording is in progress.
  window.location.hash = 'recording';
}

async function stopRecording() {
  recorder.stop();

  // Stop all tracks so the capture ends and the tab's recording indicator goes away.
  recorder.stream.getTracks().forEach((t) => t.stop());

  // Clear the state hash so the service worker knows recording has stopped.
  window.location.hash = '';
}
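The toolbar icon above is only reset from the click handler. If you also want it reset when the recorder finishes on its own, the offscreen page can notify the service worker; the 'recording-stopped' message type below is illustrative, not part of the sample:

// offscreen.js (sketch): add inside recorder.onstop
chrome.runtime.sendMessage({ type: 'recording-stopped', target: 'background' });

// background.js (sketch)
chrome.runtime.onMessage.addListener((message) => {
  if (message.target === 'background' && message.type === 'recording-stopped') {
    chrome.action.setIcon({ path: 'icons/not-recording.png' });
  }
});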