From 9ecbc123963e09ebc0be834b69d8780cd27ef9c2 Mon Sep 17 00:00:00 2001 From: Stanislav Lashmanov <slashmanov@gitlab.com> Date: Mon, 6 Mar 2023 15:56:02 +0000 Subject: [PATCH] Blame page streaming HTML Streaming is now used on the full Blame page view. How streaming works: https://gitlab.com/gitlab-org/frontend/rfcs/-/issues/101 Changelog: added --- .../javascripts/blame/streaming/index.js | 56 +++++ .../pages/projects/blame/show/index.js | 7 +- .../javascripts/streaming/chunk_writer.js | 144 ++++++++++++ app/assets/javascripts/streaming/constants.js | 9 + .../streaming/handle_streamed_anchor_link.js | 26 +++ .../javascripts/streaming/html_stream.js | 33 +++ app/assets/javascripts/streaming/polyfills.js | 5 + .../streaming/rate_limit_stream_requests.js | 87 +++++++ .../javascripts/streaming/render_balancer.js | 36 +++ .../streaming/render_html_streams.js | 40 ++++ app/assets/stylesheets/framework/files.scss | 28 +++ app/controllers/projects/blame_controller.rb | 38 +++- app/services/projects/blame_service.rb | 42 +++- app/views/layouts/_head.html.haml | 1 + app/views/projects/blame/show.html.haml | 27 ++- .../development/blame_page_streaming.yml | 8 + config/routes/repository.rb | 1 + locale/gitlab.pot | 18 +- package.json | 2 + spec/features/projects/blobs/blame_spec.rb | 47 +++- .../frontend/__helpers__/shared_test_setup.js | 4 + spec/frontend/__mocks__/lodash/debounce.js | 19 +- spec/frontend/__mocks__/lodash/throttle.js | 2 +- spec/frontend/blame/streaming/index_spec.js | 110 +++++++++ spec/frontend/streaming/chunk_writer_spec.js | 214 ++++++++++++++++++ .../handle_streamed_anchor_link_spec.js | 132 +++++++++++ spec/frontend/streaming/html_stream_spec.js | 46 ++++ .../rate_limit_stream_requests_spec.js | 155 +++++++++++++ .../streaming/render_balancer_spec.js | 69 ++++++ .../streaming/render_html_streams_spec.js | 96 ++++++++ yarn.lock | 5 + 31 files changed, 1473 insertions(+), 34 deletions(-) create mode 100644 app/assets/javascripts/blame/streaming/index.js 
create mode 100644 app/assets/javascripts/streaming/chunk_writer.js create mode 100644 app/assets/javascripts/streaming/constants.js create mode 100644 app/assets/javascripts/streaming/handle_streamed_anchor_link.js create mode 100644 app/assets/javascripts/streaming/html_stream.js create mode 100644 app/assets/javascripts/streaming/polyfills.js create mode 100644 app/assets/javascripts/streaming/rate_limit_stream_requests.js create mode 100644 app/assets/javascripts/streaming/render_balancer.js create mode 100644 app/assets/javascripts/streaming/render_html_streams.js create mode 100644 config/feature_flags/development/blame_page_streaming.yml create mode 100644 spec/frontend/blame/streaming/index_spec.js create mode 100644 spec/frontend/streaming/chunk_writer_spec.js create mode 100644 spec/frontend/streaming/handle_streamed_anchor_link_spec.js create mode 100644 spec/frontend/streaming/html_stream_spec.js create mode 100644 spec/frontend/streaming/rate_limit_stream_requests_spec.js create mode 100644 spec/frontend/streaming/render_balancer_spec.js create mode 100644 spec/frontend/streaming/render_html_streams_spec.js diff --git a/app/assets/javascripts/blame/streaming/index.js b/app/assets/javascripts/blame/streaming/index.js new file mode 100644 index 0000000000000..a74e01b6423d6 --- /dev/null +++ b/app/assets/javascripts/blame/streaming/index.js @@ -0,0 +1,56 @@ +import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { createAlert } from '~/flash'; +import { __ } from '~/locale'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; +import { toPolyfillReadable } from '~/streaming/polyfills'; + +export async function renderBlamePageStreams(firstStreamPromise) { + const element = document.querySelector('#blame-stream-container'); + + if (!element || !firstStreamPromise) return; + + const stopAnchorObserver = 
handleStreamedAnchorLink(element); + const { dataset } = document.querySelector('#blob-content-holder'); + const totalExtraPages = parseInt(dataset.totalExtraPages, 10); + const { pagesUrl } = dataset; + + const remainingStreams = rateLimitStreamRequests({ + factory: (index) => { + const url = new URL(pagesUrl); + // page numbers start with 1 + // the first page is already rendered in the document + // the second page is passed with the 'firstStreamPromise' + url.searchParams.set('page', index + 3); + return fetch(url).then((response) => toPolyfillReadable(response.body)); + }, + // we don't want to overload gitaly with concurrent requests + // https://gitlab.com/gitlab-org/gitlab/-/issues/391842#note_1281695095 + // using 5 as a good starting point + maxConcurrentRequests: 5, + total: totalExtraPages, + }); + + try { + await renderHtmlStreams( + [firstStreamPromise.then(toPolyfillReadable), ...remainingStreams], + element, + ); + } catch (error) { + createAlert({ + message: __('Blame could not be loaded as a single page.'), + primaryButton: { + text: __('View blame as separate pages'), + clickHandler() { + const newUrl = new URL(window.location); + newUrl.searchParams.delete('streaming'); + window.location.href = newUrl; + }, + }, + }); + throw error; + } finally { + stopAnchorObserver(); + document.querySelector('#blame-stream-loading').remove(); + } +} diff --git a/app/assets/javascripts/pages/projects/blame/show/index.js b/app/assets/javascripts/pages/projects/blame/show/index.js index 1e4b9de90f267..f0fdd18c828da 100644 --- a/app/assets/javascripts/pages/projects/blame/show/index.js +++ b/app/assets/javascripts/pages/projects/blame/show/index.js @@ -1,5 +1,10 @@ import initBlob from '~/pages/projects/init_blob'; import redirectToCorrectPage from '~/blame/blame_redirect'; +import { renderBlamePageStreams } from '~/blame/streaming'; -redirectToCorrectPage(); +if (new URLSearchParams(window.location.search).get('streaming')) { + 
renderBlamePageStreams(window.blamePageStream); +} else { + redirectToCorrectPage(); +} initBlob(); diff --git a/app/assets/javascripts/streaming/chunk_writer.js b/app/assets/javascripts/streaming/chunk_writer.js new file mode 100644 index 0000000000000..4bbd0a5f8438f --- /dev/null +++ b/app/assets/javascripts/streaming/chunk_writer.js @@ -0,0 +1,144 @@ +import { throttle } from 'lodash'; +import { RenderBalancer } from '~/streaming/render_balancer'; +import { + BALANCE_RATE, + HIGH_FRAME_TIME, + LOW_FRAME_TIME, + MAX_CHUNK_SIZE, + MIN_CHUNK_SIZE, + TIMEOUT, +} from '~/streaming/constants'; + +const defaultConfig = { + balanceRate: BALANCE_RATE, + minChunkSize: MIN_CHUNK_SIZE, + maxChunkSize: MAX_CHUNK_SIZE, + lowFrameTime: LOW_FRAME_TIME, + highFrameTime: HIGH_FRAME_TIME, + timeout: TIMEOUT, +}; + +function concatUint8Arrays(a, b) { + const array = new Uint8Array(a.length + b.length); + array.set(a, 0); + array.set(b, a.length); + return array; +} + +// This class is used to write chunks with a balanced size +// to avoid blocking main thread for too long. +// +// A chunk can be: +// 1. Too small +// 2. Too large +// 3. Delayed in time +// +// This class resolves all these problems by +// 1. Splitting or concatenating chunks to met the size criteria +// 2. Rendering current chunk buffer immediately if enough time has passed +// +// The size of the chunk is determined by RenderBalancer, +// It measures execution time for each chunk write and adjusts next chunk size. 
+export class ChunkWriter { + buffer = null; + decoder = new TextDecoder('utf-8'); + timeout = null; + + constructor(htmlStream, config) { + this.htmlStream = htmlStream; + + const { balanceRate, minChunkSize, maxChunkSize, lowFrameTime, highFrameTime, timeout } = { + ...defaultConfig, + ...config, + }; + + // ensure we still render chunks over time if the size criteria is not met + this.scheduleAccumulatorFlush = throttle(this.flushAccumulator.bind(this), timeout); + + const averageSize = Math.round((maxChunkSize + minChunkSize) / 2); + this.size = Math.max(averageSize, minChunkSize); + + this.balancer = new RenderBalancer({ + lowFrameTime, + highFrameTime, + decrease: () => { + this.size = Math.round(Math.max(this.size / balanceRate, minChunkSize)); + }, + increase: () => { + this.size = Math.round(Math.min(this.size * balanceRate, maxChunkSize)); + }, + }); + } + + write(chunk) { + this.scheduleAccumulatorFlush.cancel(); + + if (this.buffer) { + this.buffer = concatUint8Arrays(this.buffer, chunk); + } else { + this.buffer = chunk; + } + + // accumulate chunks until the size is fulfilled + if (this.size > this.buffer.length) { + this.scheduleAccumulatorFlush(); + return Promise.resolve(); + } + + return this.balancedWrite(); + } + + balancedWrite() { + let cursor = 0; + + return this.balancer.render(() => { + const chunkPart = this.buffer.subarray(cursor, cursor + this.size); + // accumulate chunks until the size is fulfilled + // this is a hot path for the last chunkPart of the chunk + if (chunkPart.length < this.size) { + this.buffer = chunkPart; + this.scheduleAccumulatorFlush(); + return false; + } + + this.writeToDom(chunkPart); + + cursor += this.size; + if (cursor >= this.buffer.length) { + this.buffer = null; + return false; + } + // continue render + return true; + }); + } + + writeToDom(chunk, stream = true) { + // stream: true allows us to split chunks with multi-part words + const decoded = this.decoder.decode(chunk, { stream }); + 
this.htmlStream.write(decoded); + } + + flushAccumulator() { + if (this.buffer) { + this.writeToDom(this.buffer); + this.buffer = null; + } + } + + close() { + this.scheduleAccumulatorFlush.cancel(); + if (this.buffer) { + // last chunk should have stream: false to indicate the end of the stream + this.writeToDom(this.buffer, false); + this.buffer = null; + } + this.htmlStream.close(); + } + + abort() { + this.scheduleAccumulatorFlush.cancel(); + this.buffer = null; + this.htmlStream.abort(); + } +} diff --git a/app/assets/javascripts/streaming/constants.js b/app/assets/javascripts/streaming/constants.js new file mode 100644 index 0000000000000..224d93a7ac186 --- /dev/null +++ b/app/assets/javascripts/streaming/constants.js @@ -0,0 +1,9 @@ +// Lower min chunk numbers can make the page loading take incredibly long +export const MIN_CHUNK_SIZE = 128 * 1024; +export const MAX_CHUNK_SIZE = 2048 * 1024; +export const LOW_FRAME_TIME = 32; +// Tasks that take more than 50ms are considered Long +// https://web.dev/optimize-long-tasks/ +export const HIGH_FRAME_TIME = 64; +export const BALANCE_RATE = 1.2; +export const TIMEOUT = 500; diff --git a/app/assets/javascripts/streaming/handle_streamed_anchor_link.js b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js new file mode 100644 index 0000000000000..315dc9bb0a0a8 --- /dev/null +++ b/app/assets/javascripts/streaming/handle_streamed_anchor_link.js @@ -0,0 +1,26 @@ +import { throttle } from 'lodash'; +import { scrollToElement } from '~/lib/utils/common_utils'; +import LineHighlighter from '~/blob/line_highlighter'; + +const noop = () => {}; + +export function handleStreamedAnchorLink(rootElement) { + // "#L100-200" → ['L100', 'L200'] + const [anchorStart, end] = window.location.hash.substring(1).split('-'); + const anchorEnd = end ? 
`L${end}` : anchorStart; + if (!anchorStart || document.getElementById(anchorEnd)) return noop; + + const handler = throttle((mutationList, instance) => { + if (!document.getElementById(anchorEnd)) return; + scrollToElement(document.getElementById(anchorStart)); + // eslint-disable-next-line no-new + new LineHighlighter(); + instance.disconnect(); + }, 300); + + const observer = new MutationObserver(handler); + + observer.observe(rootElement, { childList: true, subtree: true }); + + return () => observer.disconnect(); +} diff --git a/app/assets/javascripts/streaming/html_stream.js b/app/assets/javascripts/streaming/html_stream.js new file mode 100644 index 0000000000000..8182f69a60783 --- /dev/null +++ b/app/assets/javascripts/streaming/html_stream.js @@ -0,0 +1,33 @@ +import { ChunkWriter } from '~/streaming/chunk_writer'; + +export class HtmlStream { + constructor(element) { + const streamDocument = document.implementation.createHTMLDocument('stream'); + + streamDocument.open(); + streamDocument.write('<streaming-element>'); + + const virtualStreamingElement = streamDocument.querySelector('streaming-element'); + element.appendChild(document.adoptNode(virtualStreamingElement)); + + this.streamDocument = streamDocument; + } + + withChunkWriter(config) { + return new ChunkWriter(this, config); + } + + write(chunk) { + // eslint-disable-next-line no-unsanitized/method + this.streamDocument.write(chunk); + } + + close() { + this.streamDocument.write('</streaming-element>'); + this.streamDocument.close(); + } + + abort() { + this.streamDocument.close(); + } +} diff --git a/app/assets/javascripts/streaming/polyfills.js b/app/assets/javascripts/streaming/polyfills.js new file mode 100644 index 0000000000000..a9a044a3e99c6 --- /dev/null +++ b/app/assets/javascripts/streaming/polyfills.js @@ -0,0 +1,5 @@ +import { createReadableStreamWrapper } from '@mattiasbuelens/web-streams-adapter'; +import { ReadableStream as PolyfillReadableStream } from 'web-streams-polyfill'; + +// 
TODO: remove this when our WebStreams API reaches 100% support +export const toPolyfillReadable = createReadableStreamWrapper(PolyfillReadableStream); diff --git a/app/assets/javascripts/streaming/rate_limit_stream_requests.js b/app/assets/javascripts/streaming/rate_limit_stream_requests.js new file mode 100644 index 0000000000000..04a592baa162f --- /dev/null +++ b/app/assets/javascripts/streaming/rate_limit_stream_requests.js @@ -0,0 +1,87 @@ +const consumeReadableStream = (stream) => { + return new Promise((resolve, reject) => { + stream.pipeTo( + new WritableStream({ + close: resolve, + abort: reject, + }), + ); + }); +}; + +const wait = (timeout) => + new Promise((resolve) => { + setTimeout(resolve, timeout); + }); + +// this rate-limiting approach is specific to Web Streams +// because streams only resolve when they're fully consumed +// so we need to split each stream into two pieces: +// one for the rate-limiter (wait for all the bytes to be sent) +// another for the original consumer +export const rateLimitStreamRequests = ({ + factory, + total, + maxConcurrentRequests, + immediateCount = maxConcurrentRequests, + timeout = 0, +}) => { + if (total === 0) return []; + + const unsettled = []; + + const pushUnsettled = (promise) => { + let res; + let rej; + const consume = new Promise((resolve, reject) => { + res = resolve; + rej = reject; + }); + unsettled.push(consume); + return promise.then((stream) => { + const [first, second] = stream.tee(); + // eslint-disable-next-line promise/no-nesting + consumeReadableStream(first) + .then(() => { + unsettled.splice(unsettled.indexOf(consume), 1); + res(); + }) + .catch(rej); + return second; + }, rej); + }; + + const immediate = Array.from({ length: Math.min(immediateCount, total) }, (_, i) => + pushUnsettled(factory(i)), + ); + + const queue = []; + const flushQueue = () => { + const promises = + unsettled.length > maxConcurrentRequests ? 
unsettled : [...unsettled, wait(timeout)]; + // errors are handled by the caller + // eslint-disable-next-line promise/catch-or-return + Promise.race(promises).then(() => { + const cb = queue.shift(); + cb?.(); + if (queue.length !== 0) { + // wait for stream consumer promise to be removed from unsettled + queueMicrotask(flushQueue); + } + }); + }; + + const throttled = Array.from({ length: total - immediateCount }, (_, i) => { + return new Promise((resolve, reject) => { + queue.push(() => { + pushUnsettled(factory(i + immediateCount)) + .then(resolve) + .catch(reject); + }); + }); + }); + + flushQueue(); + + return [...immediate, ...throttled]; +}; diff --git a/app/assets/javascripts/streaming/render_balancer.js b/app/assets/javascripts/streaming/render_balancer.js new file mode 100644 index 0000000000000..66929ff3a54ad --- /dev/null +++ b/app/assets/javascripts/streaming/render_balancer.js @@ -0,0 +1,36 @@ +export class RenderBalancer { + previousTimestamp = undefined; + + constructor({ increase, decrease, highFrameTime, lowFrameTime }) { + this.increase = increase; + this.decrease = decrease; + this.highFrameTime = highFrameTime; + this.lowFrameTime = lowFrameTime; + } + + render(fn) { + return new Promise((resolve) => { + const callback = (timestamp) => { + this.throttle(timestamp); + if (fn()) requestAnimationFrame(callback); + else resolve(); + }; + requestAnimationFrame(callback); + }); + } + + throttle(timestamp) { + const { previousTimestamp } = this; + this.previousTimestamp = timestamp; + if (previousTimestamp === undefined) return; + + const duration = Math.round(timestamp - previousTimestamp); + if (!duration) return; + + if (duration >= this.highFrameTime) { + this.decrease(); + } else if (duration < this.lowFrameTime) { + this.increase(); + } + } +} diff --git a/app/assets/javascripts/streaming/render_html_streams.js b/app/assets/javascripts/streaming/render_html_streams.js new file mode 100644 index 0000000000000..7201e541777c8 --- /dev/null +++ 
b/app/assets/javascripts/streaming/render_html_streams.js @@ -0,0 +1,40 @@ +import { HtmlStream } from '~/streaming/html_stream'; + +async function pipeStreams(domWriter, streamPromises) { + try { + for await (const stream of streamPromises.slice(0, -1)) { + await stream.pipeTo(domWriter, { preventClose: true }); + } + const stream = await streamPromises[streamPromises.length - 1]; + await stream.pipeTo(domWriter); + } catch (error) { + domWriter.abort(error); + } +} + +// this function (and the rest of the pipeline) expects polyfilled streams +// do not pass native streams here unless our browser support allows for it +// TODO: remove this notice when our WebStreams API support reaches 100% +export function renderHtmlStreams(streamPromises, element, config) { + if (streamPromises.length === 0) return Promise.resolve(); + + const chunkedHtmlStream = new HtmlStream(element).withChunkWriter(config); + + return new Promise((resolve, reject) => { + const domWriter = new WritableStream({ + write(chunk) { + return chunkedHtmlStream.write(chunk); + }, + close() { + chunkedHtmlStream.close(); + resolve(); + }, + abort(error) { + chunkedHtmlStream.abort(); + reject(error); + }, + }); + + pipeStreams(domWriter, streamPromises); + }); +} diff --git a/app/assets/stylesheets/framework/files.scss b/app/assets/stylesheets/framework/files.scss index 9ea5a66b3bc4c..b292adf9eac52 100644 --- a/app/assets/stylesheets/framework/files.scss +++ b/app/assets/stylesheets/framework/files.scss @@ -580,3 +580,31 @@ span.idiff { padding: 0; border-radius: 0 0 $border-radius-default $border-radius-default; } + +.blame-stream-container { + border-top: 1px solid $border-color; +} + +.blame-stream-loading { + $gradient-size: 16px; + position: sticky; + bottom: 0; + display: flex; + justify-content: center; + align-items: center; + margin-top: -$gradient-size; + height: $gl-spacing-scale-10; + border-top: $gradient-size solid transparent; + background-color: $white; + box-sizing: content-box; + 
background-clip: content-box; + + .gradient { + position: absolute; + left: 0; + right: 0; + top: -$gradient-size; + height: $gradient-size; + background: linear-gradient(to top, $white, transparentize($white, 1)); + } +} diff --git a/app/controllers/projects/blame_controller.rb b/app/controllers/projects/blame_controller.rb index cfff281604e5d..d41b347dc5aa8 100644 --- a/app/controllers/projects/blame_controller.rb +++ b/app/controllers/projects/blame_controller.rb @@ -23,13 +23,47 @@ def show environment_params[:find_latest] = true @environment = ::Environments::EnvironmentsByDeploymentsFinder.new(@project, current_user, environment_params).execute.last - blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page, :no_pagination)) + permitted_params = params.permit(:page, :no_pagination, :streaming) + blame_service = Projects::BlameService.new(@blob, @commit, permitted_params) @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: blame_service.page).fabricate! - @blame_pagination = blame_service.pagination + @entire_blame_path = full_blame_path(no_pagination: true) + @blame_pages_url = blame_pages_url(permitted_params) + if blame_service.streaming_possible + @entire_blame_path = full_blame_path(streaming: true) + end + + @streaming_enabled = blame_service.streaming_enabled + @blame_pagination = blame_service.pagination unless @streaming_enabled @blame_per_page = blame_service.per_page + + render locals: { total_extra_pages: blame_service.total_extra_pages } + end + + def page + @blob = @repository.blob_at(@commit.id, @path) + + environment_params = @repository.branch_exists?(@ref) ? 
{ ref: @ref } : { commit: @commit } + environment_params[:find_latest] = true + @environment = ::Environments::EnvironmentsByDeploymentsFinder.new(@project, current_user, environment_params).execute.last + + blame_service = Projects::BlameService.new(@blob, @commit, params.permit(:page, :streaming)) + + @blame = Gitlab::View::Presenter::Factory.new(blame_service.blame, project: @project, path: @path, page: blame_service.page).fabricate! + + render partial: 'page' + end + + private + + def full_blame_path(params) + namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, **params) + end + + def blame_pages_url(params) + namespace_project_blame_page_url(namespace_id: @project.namespace, project_id: @project, id: @id, **params) end end diff --git a/app/services/projects/blame_service.rb b/app/services/projects/blame_service.rb index 58e146e5a32b1..1ea16040655b1 100644 --- a/app/services/projects/blame_service.rb +++ b/app/services/projects/blame_service.rb @@ -5,15 +5,19 @@ module Projects class BlameService PER_PAGE = 1000 + STREAMING_FIRST_PAGE_SIZE = 200 + STREAMING_PER_PAGE = 2000 def initialize(blob, commit, params) @blob = blob @commit = commit - @page = extract_page(params) + @streaming_enabled = streaming_state(params) @pagination_enabled = pagination_state(params) + @page = extract_page(params) + @params = params end - attr_reader :page + attr_reader :page, :streaming_enabled def blame Gitlab::Blame.new(blob, commit, range: blame_range) @@ -28,7 +32,22 @@ def pagination end def per_page - PER_PAGE + streaming_enabled ? 
STREAMING_PER_PAGE : PER_PAGE + end + + def total_pages + total = (blob_lines_count.to_f / per_page).ceil + return total unless streaming_enabled + + ([blob_lines_count - STREAMING_FIRST_PAGE_SIZE, 0].max.to_f / per_page).ceil + 1 + end + + def total_extra_pages + [total_pages - 1, 0].max + end + + def streaming_possible + Feature.enabled?(:blame_page_streaming, commit.project) end private @@ -36,9 +55,16 @@ def per_page attr_reader :blob, :commit, :pagination_enabled def blame_range - return unless pagination_enabled + return unless pagination_enabled || streaming_enabled first_line = (page - 1) * per_page + 1 + + if streaming_enabled + return 1..STREAMING_FIRST_PAGE_SIZE if page == 1 + + first_line = STREAMING_FIRST_PAGE_SIZE + (page - 2) * per_page + 1 + end + last_line = (first_line + per_page).to_i - 1 first_line..last_line @@ -52,6 +78,12 @@ def extract_page(params) page end + def streaming_state(params) + return false unless streaming_possible + + Gitlab::Utils.to_boolean(params[:streaming], default: false) + end + def pagination_state(params) return false if Gitlab::Utils.to_boolean(params[:no_pagination], default: false) @@ -59,7 +91,7 @@ def pagination_state(params) end def overlimit?(page) - page * per_page >= blob_lines_count + per_page + page > total_pages end def blob_lines_count diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml index dd441d0d1556b..f0c1b0901403d 100644 --- a/app/views/layouts/_head.html.haml +++ b/app/views/layouts/_head.html.haml @@ -10,6 +10,7 @@ %meta{ 'http-equiv' => 'X-UA-Compatible', content: 'IE=edge' } = render 'layouts/startup_js' + = yield :startup_js - if page_canonical_link %link{ rel: 'canonical', href: page_canonical_link } diff --git a/app/views/projects/blame/show.html.haml b/app/views/projects/blame/show.html.haml index 74b85a93c8ee7..ee7ca9cd35166 100644 --- a/app/views/projects/blame/show.html.haml +++ b/app/views/projects/blame/show.html.haml @@ -1,6 +1,15 @@ - page_title 
_("Blame"), @blob.path, @ref - add_page_specific_style 'page_bundles/tree' -- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page } +- if @streaming_enabled && total_extra_pages > 0 + - content_for :startup_js do + = javascript_tag do + :plain + window.blamePageStream = (() => { + const url = new URL("#{@blame_pages_url}"); + url.searchParams.set('page', 2); + return fetch(url).then(response => response.body); + })(); +- dataset = { testid: 'blob-content-holder', qa_selector: 'blame_file_content', per_page: @blame_per_page, total_extra_pages: total_extra_pages - 1, pages_url: @blame_pages_url } #blob-content-holder.tree-holder.js-per-page{ data: dataset } = render "projects/blob/breadcrumb", blob: @blob, blame: true @@ -26,11 +35,21 @@ .blame-table-wrapper = render partial: 'page' + - if @streaming_enabled + #blame-stream-container.blame-stream-container + - if @blame_pagination && @blame_pagination.total_pages > 1 .gl-display-flex.gl-justify-content-center.gl-flex-direction-column.gl-align-items-center.gl-p-3.gl-bg-gray-50.gl-border-t-solid.gl-border-t-1.gl-border-gray-100 - = _('For faster browsing, not all history is shown.') - = render Pajamas::ButtonComponent.new(href: namespace_project_blame_path(namespace_id: @project.namespace, project_id: @project, id: @id, no_pagination: true), size: :small, button_options: { class: 'gl-mt-3' }) do |c| - = _('View entire blame') + = render Pajamas::ButtonComponent.new(href: @entire_blame_path, size: :small, button_options: { class: 'gl-mt-3' }) do |c| + = _('Show full blame') + + - if @streaming_enabled + #blame-stream-loading.blame-stream-loading + .gradient + = gl_loading_icon(size: 'sm') + %span.gl-mx-2 + = _('Loading full blame...') - if @blame_pagination = paginate(@blame_pagination, theme: "gitlab") + diff --git a/config/feature_flags/development/blame_page_streaming.yml b/config/feature_flags/development/blame_page_streaming.yml new file mode 100644 index 
0000000000000..44d64800dabe6 --- /dev/null +++ b/config/feature_flags/development/blame_page_streaming.yml @@ -0,0 +1,8 @@ +--- +name: blame_page_streaming +introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/110208 +rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/392890 +milestone: '15.10' +type: development +group: group::source code +default_enabled: false diff --git a/config/routes/repository.rb b/config/routes/repository.rb index 0202eb80b2327..60d3d37bdc85a 100644 --- a/config/routes/repository.rb +++ b/config/routes/repository.rb @@ -75,6 +75,7 @@ get '/tree/*id', to: 'tree#show', as: :tree get '/raw/*id', to: 'raw#show', as: :raw + get '/blame_page/*id', to: 'blame#page', as: :blame_page get '/blame/*id', to: 'blame#show', as: :blame get '/commits', to: 'commits#commits_root', as: :commits_root diff --git a/locale/gitlab.pot b/locale/gitlab.pot index 9c86b87a01a74..e9a4b02bb97ea 100644 --- a/locale/gitlab.pot +++ b/locale/gitlab.pot @@ -6780,6 +6780,9 @@ msgstr "" msgid "Blame" msgstr "" +msgid "Blame could not be loaded as a single page." +msgstr "" + msgid "BlobViewer|View on %{environmentName}" msgstr "" @@ -17975,9 +17978,6 @@ msgstr "" msgid "For example, the application using the token or the purpose of the token. Do not give sensitive information for the name of the token, as it will be visible to all %{resource_type} members." msgstr "" -msgid "For faster browsing, not all history is shown." -msgstr "" - msgid "For files larger than this limit, only index the file name. The file content is neither indexed nor searchable." msgstr "" @@ -25673,6 +25673,9 @@ msgstr "" msgid "Loading files, directories, and submodules in the path %{path} for commit reference %{ref}" msgstr "" +msgid "Loading full blame..." 
+msgstr "" + msgid "Loading more" msgstr "" @@ -40136,6 +40139,9 @@ msgstr "" msgid "Show filters" msgstr "" +msgid "Show full blame" +msgstr "" + msgid "Show group milestones" msgstr "" @@ -47381,6 +47387,9 @@ msgstr "" msgid "View blame" msgstr "" +msgid "View blame as separate pages" +msgstr "" + msgid "View blame prior to this change" msgstr "" @@ -47410,9 +47419,6 @@ msgstr "" msgid "View eligible approvers" msgstr "" -msgid "View entire blame" -msgstr "" - msgid "View exposed artifact" msgid_plural "View %d exposed artifacts" msgstr[0] "" diff --git a/package.json b/package.json index 2f6732ccc0b81..646362a49feaf 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "@gitlab/ui": "56.2.0", "@gitlab/visual-review-tools": "1.7.3", "@gitlab/web-ide": "0.0.1-dev-20230223005157", + "@mattiasbuelens/web-streams-adapter": "^0.1.0", "@rails/actioncable": "6.1.4-7", "@rails/ujs": "6.1.4-7", "@sourcegraph/code-host-integration": "0.0.84", @@ -197,6 +198,7 @@ "vue-virtual-scroll-list": "^1.4.7", "vuedraggable": "^2.23.0", "vuex": "^3.6.2", + "web-streams-polyfill": "^3.2.1", "web-vitals": "^0.2.4", "webpack": "^4.46.0", "webpack-bundle-analyzer": "^4.6.1", diff --git a/spec/features/projects/blobs/blame_spec.rb b/spec/features/projects/blobs/blame_spec.rb index 27b7c6ef2d52e..d3558af81b8ab 100644 --- a/spec/features/projects/blobs/blame_spec.rb +++ b/spec/features/projects/blobs/blame_spec.rb @@ -38,7 +38,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).not_to have_css('.gl-pagination') - expect(page).not_to have_link _('View entire blame') + expect(page).not_to have_link _('Show full blame') end end @@ -53,7 +53,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).to have_css('.gl-pagination') - expect(page).to have_link _('View entire blame') + expect(page).to have_link _('Show full blame') 
expect(page).to have_css('#L1') expect(page).not_to have_css('#L3') @@ -85,19 +85,42 @@ def visit_blob_blame(path) end end - context 'when user clicks on View entire blame button' do + shared_examples 'a full blame page' do + context 'when user clicks on Show full blame button' do + before do + visit_blob_blame(path) + click_link _('Show full blame') + end + + it 'displays the blame page without pagination' do + within '[data-testid="blob-content-holder"]' do + expect(page).to have_css('#L1') + expect(page).to have_css('#L667') + expect(page).not_to have_css('.gl-pagination') + end + end + end + end + + context 'when streaming is disabled' do before do - visit_blob_blame(path) + stub_feature_flags(blame_page_streaming: false) end - it 'displays the blame page without pagination' do - within '[data-testid="blob-content-holder"]' do - click_link _('View entire blame') + it_behaves_like 'a full blame page' + end - expect(page).to have_css('#L1') - expect(page).to have_css('#L3') - expect(page).not_to have_css('.gl-pagination') - end + context 'when streaming is enabled' do + before do + stub_const('Projects::BlameService::STREAMING_PER_PAGE', 50) + end + + it_behaves_like 'a full blame page' + + it 'shows loading text' do + visit_blob_blame(path) + click_link _('Show full blame') + expect(page).to have_text('Loading full blame...') end end @@ -112,7 +135,7 @@ def visit_blob_blame(path) within '[data-testid="blob-content-holder"]' do expect(page).to have_css('.blame-commit') expect(page).not_to have_css('.gl-pagination') - expect(page).not_to have_link _('View entire blame') + expect(page).not_to have_link _('Show full blame') end end end diff --git a/spec/frontend/__helpers__/shared_test_setup.js b/spec/frontend/__helpers__/shared_test_setup.js index 2fe9fe89a90ec..7fc81cf65481e 100644 --- a/spec/frontend/__helpers__/shared_test_setup.js +++ b/spec/frontend/__helpers__/shared_test_setup.js @@ -1,4 +1,5 @@ /* Common setup for both unit and integration test environments 
*/ +import { ReadableStream, WritableStream } from 'node:stream/web'; import * as jqueryMatchers from 'custom-jquery-matchers'; import Vue from 'vue'; import { enableAutoDestroy } from '@vue/test-utils'; @@ -13,6 +14,9 @@ import './dom_shims'; import './jquery'; import '~/commons/bootstrap'; +global.ReadableStream = ReadableStream; +global.WritableStream = WritableStream; + enableAutoDestroy(afterEach); // This module has some fairly decent visual test coverage in it's own repository. diff --git a/spec/frontend/__mocks__/lodash/debounce.js b/spec/frontend/__mocks__/lodash/debounce.js index d4fe2ce54063c..15f806fc31a1c 100644 --- a/spec/frontend/__mocks__/lodash/debounce.js +++ b/spec/frontend/__mocks__/lodash/debounce.js @@ -9,9 +9,22 @@ // Further reference: https://github.com/facebook/jest/issues/3465 export default (fn) => { - const debouncedFn = jest.fn().mockImplementation(fn); - debouncedFn.cancel = jest.fn(); - debouncedFn.flush = jest.fn().mockImplementation(() => { + let id; + const debouncedFn = jest.fn(function run(...args) { + // this is calculated in runtime so beforeAll hook works in tests + const timeout = global.JEST_DEBOUNCE_THROTTLE_TIMEOUT; + if (timeout) { + id = setTimeout(() => { + fn.apply(this, args); + }, timeout); + } else { + fn.apply(this, args); + } + }); + debouncedFn.cancel = jest.fn(() => { + clearTimeout(id); + }); + debouncedFn.flush = jest.fn(() => { const errorMessage = "The .flush() method returned by lodash.debounce is not yet implemented/mocked by the mock in 'spec/frontend/__mocks__/lodash/debounce.js'."; diff --git a/spec/frontend/__mocks__/lodash/throttle.js b/spec/frontend/__mocks__/lodash/throttle.js index e8a82654c78a7..b101466291802 100644 --- a/spec/frontend/__mocks__/lodash/throttle.js +++ b/spec/frontend/__mocks__/lodash/throttle.js @@ -1,4 +1,4 @@ // Similar to `lodash/debounce`, `lodash/throttle` also causes flaky specs. // See `./debounce.js` for more details. 
-export default (fn) => fn; +export { default } from './debounce'; diff --git a/spec/frontend/blame/streaming/index_spec.js b/spec/frontend/blame/streaming/index_spec.js new file mode 100644 index 0000000000000..a5069f8a7d868 --- /dev/null +++ b/spec/frontend/blame/streaming/index_spec.js @@ -0,0 +1,110 @@ +import waitForPromises from 'helpers/wait_for_promises'; +import { renderBlamePageStreams } from '~/blame/streaming'; +import { setHTMLFixture } from 'helpers/fixtures'; +import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { toPolyfillReadable } from '~/streaming/polyfills'; +import { createAlert } from '~/flash'; + +jest.mock('~/streaming/render_html_streams'); +jest.mock('~/streaming/rate_limit_stream_requests'); +jest.mock('~/streaming/handle_streamed_anchor_link'); +jest.mock('~/streaming/polyfills'); +jest.mock('~/sentry'); +jest.mock('~/flash'); + +global.fetch = jest.fn(); + +describe('renderBlamePageStreams', () => { + let stopAnchor; + const PAGES_URL = 'https://example.com/'; + const findStreamContainer = () => document.querySelector('#blame-stream-container'); + const findStreamLoadingIndicator = () => document.querySelector('#blame-stream-loading'); + + const setupHtml = (totalExtraPages = 0) => { + setHTMLFixture(` + <div id="blob-content-holder" + data-total-extra-pages="${totalExtraPages}" + data-pages-url="${PAGES_URL}" + ></div> + <div id="blame-stream-container"></div> + <div id="blame-stream-loading"></div> + `); + }; + + handleStreamedAnchorLink.mockImplementation(() => stopAnchor); + rateLimitStreamRequests.mockImplementation(({ factory, total }) => { + return Array.from({ length: total }, (_, i) => { + return Promise.resolve(factory(i)); + }); + }); + toPolyfillReadable.mockImplementation((obj) => obj); + + beforeEach(() => { + stopAnchor = jest.fn(); 
+ fetch.mockClear(); + }); + + it('does nothing for an empty page', async () => { + await renderBlamePageStreams(); + + expect(handleStreamedAnchorLink).not.toHaveBeenCalled(); + expect(renderHtmlStreams).not.toHaveBeenCalled(); + }); + + it('renders a single stream', async () => { + let res; + const stream = new Promise((resolve) => { + res = resolve; + }); + renderHtmlStreams.mockImplementationOnce(() => stream); + setupHtml(); + + renderBlamePageStreams(stream); + + expect(handleStreamedAnchorLink).toHaveBeenCalledTimes(1); + expect(stopAnchor).toHaveBeenCalledTimes(0); + expect(renderHtmlStreams).toHaveBeenCalledWith([stream], findStreamContainer()); + expect(findStreamLoadingIndicator()).not.toBe(null); + + res(); + await waitForPromises(); + + expect(stopAnchor).toHaveBeenCalledTimes(1); + expect(findStreamLoadingIndicator()).toBe(null); + }); + + it('renders rest of the streams', async () => { + const stream = Promise.resolve(); + const stream2 = Promise.resolve({ body: null }); + fetch.mockImplementationOnce(() => stream2); + setupHtml(1); + + await renderBlamePageStreams(stream); + + expect(fetch.mock.calls[0][0].toString()).toBe(`${PAGES_URL}?page=3`); + expect(renderHtmlStreams).toHaveBeenCalledWith([stream, stream2], findStreamContainer()); + }); + + it('shows an error message when failed', async () => { + const stream = Promise.resolve(); + const error = new Error(); + renderHtmlStreams.mockImplementationOnce(() => Promise.reject(error)); + setupHtml(); + + try { + await renderBlamePageStreams(stream); + } catch (err) { + expect(err).toBe(error); + } + + expect(createAlert).toHaveBeenCalledWith({ + message: 'Blame could not be loaded as a single page.', + primaryButton: { + text: 'View blame as separate pages', + clickHandler: expect.any(Function), + }, + }); + }); +}); diff --git a/spec/frontend/streaming/chunk_writer_spec.js b/spec/frontend/streaming/chunk_writer_spec.js new file mode 100644 index 0000000000000..2aadb33283867 --- /dev/null +++ 
b/spec/frontend/streaming/chunk_writer_spec.js @@ -0,0 +1,214 @@ +import { ChunkWriter } from '~/streaming/chunk_writer'; +import { RenderBalancer } from '~/streaming/render_balancer'; + +jest.mock('~/streaming/render_balancer'); + +describe('ChunkWriter', () => { + let accumulator = ''; + let write; + let close; + let abort; + let config; + let render; + + const createChunk = (text) => { + const encoder = new TextEncoder(); + return encoder.encode(text); + }; + + const createHtmlStream = () => { + write = jest.fn((part) => { + accumulator += part; + }); + close = jest.fn(); + abort = jest.fn(); + return { + write, + close, + abort, + }; + }; + + const createWriter = () => { + return new ChunkWriter(createHtmlStream(), config); + }; + + const pushChunks = (...chunks) => { + const writer = createWriter(); + chunks.forEach((chunk) => { + writer.write(createChunk(chunk)); + }); + writer.close(); + }; + + afterAll(() => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = undefined; + }); + + beforeEach(() => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = 100; + accumulator = ''; + config = undefined; + render = jest.fn((cb) => { + while (cb()) { + // render until 'false' + } + }); + RenderBalancer.mockImplementation(() => ({ render })); + }); + + describe('when chunk length must be "1"', () => { + beforeEach(() => { + config = { minChunkSize: 1, maxChunkSize: 1 }; + }); + + it('splits big chunks into smaller ones', () => { + const text = 'foobar'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(text.length); + }); + + it('handles small emoji chunks', () => { + const text = 'foo👀bar👨‍👩‍👧baz👧👧🏻👧🏼👧🏽👧🏾👧🏿'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(createChunk(text).length); + }); + }); + + describe('when chunk length must not be lower than "5" and exceed "10"', () => { + beforeEach(() => { + config = { minChunkSize: 5, maxChunkSize: 10 }; + }); + + it('joins 
small chunks', () => { + const text = '12345'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(1); + expect(close).toHaveBeenCalledTimes(1); + }); + + it('handles overflow with small chunks', () => { + const text = '123456789'; + pushChunks(...text.split('')); + expect(accumulator).toBe(text); + expect(write).toHaveBeenCalledTimes(2); + expect(close).toHaveBeenCalledTimes(1); + }); + + it('calls flush on small chunks', () => { + global.JEST_DEBOUNCE_THROTTLE_TIMEOUT = undefined; + const flushAccumulator = jest.spyOn(ChunkWriter.prototype, 'flushAccumulator'); + const text = '1'; + pushChunks(text); + expect(accumulator).toBe(text); + expect(flushAccumulator).toHaveBeenCalledTimes(1); + }); + + it('calls flush on large chunks', () => { + const flushAccumulator = jest.spyOn(ChunkWriter.prototype, 'flushAccumulator'); + const text = '1234567890123'; + const writer = createWriter(); + writer.write(createChunk(text)); + jest.runAllTimers(); + expect(accumulator).toBe(text); + expect(flushAccumulator).toHaveBeenCalledTimes(1); + }); + }); + + describe('chunk balancing', () => { + let increase; + let decrease; + let renderOnce; + + beforeEach(() => { + render = jest.fn((cb) => { + let next = true; + renderOnce = () => { + if (!next) return; + next = cb(); + }; + }); + RenderBalancer.mockImplementation(({ increase: inc, decrease: dec }) => { + increase = jest.fn(inc); + decrease = jest.fn(dec); + return { + render, + }; + }); + }); + + describe('when frame time exceeds low limit', () => { + beforeEach(() => { + config = { + minChunkSize: 1, + maxChunkSize: 5, + balanceRate: 10, + }; + }); + + it('increases chunk size', () => { + const text = '111222223'; + const writer = createWriter(); + const chunk = createChunk(text); + + writer.write(chunk); + + renderOnce(); + increase(); + renderOnce(); + renderOnce(); + + writer.close(); + + expect(accumulator).toBe(text); + expect(write.mock.calls).toMatchObject([['111'], 
['22222'], ['3']]); + expect(close).toHaveBeenCalledTimes(1); + }); + }); + + describe('when frame time exceeds high limit', () => { + beforeEach(() => { + config = { + minChunkSize: 1, + maxChunkSize: 10, + balanceRate: 2, + }; + }); + + it('decreases chunk size', () => { + const text = '1111112223345'; + const writer = createWriter(); + const chunk = createChunk(text); + + writer.write(chunk); + + renderOnce(); + decrease(); + + renderOnce(); + decrease(); + + renderOnce(); + decrease(); + + renderOnce(); + renderOnce(); + + writer.close(); + + expect(accumulator).toBe(text); + expect(write.mock.calls).toMatchObject([['111111'], ['222'], ['33'], ['4'], ['5']]); + expect(close).toHaveBeenCalledTimes(1); + }); + }); + }); + + it('calls abort on htmlStream', () => { + const writer = createWriter(); + writer.abort(); + expect(abort).toHaveBeenCalledTimes(1); + }); +}); diff --git a/spec/frontend/streaming/handle_streamed_anchor_link_spec.js b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js new file mode 100644 index 0000000000000..ef17957b2fc6a --- /dev/null +++ b/spec/frontend/streaming/handle_streamed_anchor_link_spec.js @@ -0,0 +1,132 @@ +import { resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures'; +import waitForPromises from 'helpers/wait_for_promises'; +import { handleStreamedAnchorLink } from '~/streaming/handle_streamed_anchor_link'; +import { scrollToElement } from '~/lib/utils/common_utils'; +import LineHighlighter from '~/blob/line_highlighter'; +import { TEST_HOST } from 'spec/test_constants'; + +jest.mock('~/lib/utils/common_utils'); +jest.mock('~/blob/line_highlighter'); + +describe('handleStreamedAnchorLink', () => { + const ANCHOR_START = 'L100'; + const ANCHOR_END = '300'; + const findRoot = () => document.querySelector('#root'); + + afterEach(() => { + resetHTMLFixture(); + }); + + describe('when single line anchor is given', () => { + beforeEach(() => { + delete window.location; + window.location = new 
URL(`${TEST_HOST}#${ANCHOR_START}`); + }); + + describe('when element is present', () => { + beforeEach(() => { + setHTMLFixture(`<div id="root"><div id="${ANCHOR_START}"></div></div>`); + handleStreamedAnchorLink(findRoot()); + }); + + it('does nothing', async () => { + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + + describe('when element is streamed', () => { + let stop; + const insertElement = () => { + findRoot().insertAdjacentHTML('afterbegin', `<div id="${ANCHOR_START}"></div>`); + }; + + beforeEach(() => { + setHTMLFixture('<div id="root"></div>'); + stop = handleStreamedAnchorLink(findRoot()); + }); + + afterEach(() => { + stop = undefined; + }); + + it('scrolls to the anchor when inserted', async () => { + insertElement(); + await waitForPromises(); + expect(scrollToElement).toHaveBeenCalledTimes(1); + expect(LineHighlighter).toHaveBeenCalledTimes(1); + }); + + it("doesn't scroll to the anchor when destroyed", async () => { + stop(); + insertElement(); + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + }); + + describe('when line range anchor is given', () => { + beforeEach(() => { + delete window.location; + window.location = new URL(`${TEST_HOST}#${ANCHOR_START}-${ANCHOR_END}`); + }); + + describe('when last element is present', () => { + beforeEach(() => { + setHTMLFixture(`<div id="root"><div id="L${ANCHOR_END}"></div></div>`); + handleStreamedAnchorLink(findRoot()); + }); + + it('does nothing', async () => { + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + + describe('when last element is streamed', () => { + let stop; + const insertElement = () => { + findRoot().insertAdjacentHTML( + 'afterbegin', + `<div id="${ANCHOR_START}"></div><div id="L${ANCHOR_END}"></div>`, + ); + }; + + beforeEach(() => { + setHTMLFixture('<div id="root"></div>'); + stop = handleStreamedAnchorLink(findRoot()); + }); + + afterEach(() => { + stop = 
undefined; + }); + + it('scrolls to the anchor when inserted', async () => { + insertElement(); + await waitForPromises(); + expect(scrollToElement).toHaveBeenCalledTimes(1); + expect(LineHighlighter).toHaveBeenCalledTimes(1); + }); + + it("doesn't scroll to the anchor when destroyed", async () => { + stop(); + insertElement(); + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); + }); + + describe('when anchor is not given', () => { + beforeEach(() => { + setHTMLFixture(`<div id="root"></div>`); + handleStreamedAnchorLink(findRoot()); + }); + + it('does nothing', async () => { + await waitForPromises(); + expect(scrollToElement).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/spec/frontend/streaming/html_stream_spec.js b/spec/frontend/streaming/html_stream_spec.js new file mode 100644 index 0000000000000..115a9ddc803a2 --- /dev/null +++ b/spec/frontend/streaming/html_stream_spec.js @@ -0,0 +1,46 @@ +import { HtmlStream } from '~/streaming/html_stream'; +import { ChunkWriter } from '~/streaming/chunk_writer'; + +jest.mock('~/streaming/chunk_writer'); + +describe('HtmlStream', () => { + let write; + let close; + let streamingElement; + + beforeEach(() => { + write = jest.fn(); + close = jest.fn(); + jest.spyOn(Document.prototype, 'write').mockImplementation(write); + jest.spyOn(Document.prototype, 'close').mockImplementation(close); + jest.spyOn(Document.prototype, 'querySelector').mockImplementation(() => { + streamingElement = document.createElement('div'); + return streamingElement; + }); + }); + + it('attaches to original document', () => { + // eslint-disable-next-line no-new + new HtmlStream(document.body); + expect(document.body.contains(streamingElement)).toBe(true); + }); + + it('can write to a document', () => { + const htmlStream = new HtmlStream(document.body); + htmlStream.write('foo'); + htmlStream.close(); + expect(write.mock.calls).toEqual([['<streaming-element>'], ['foo'], ['</streaming-element>']]); + 
expect(close).toHaveBeenCalledTimes(1); + }); + + it('returns chunked writer', () => { + const htmlStream = new HtmlStream(document.body).withChunkWriter(); + expect(htmlStream).toBeInstanceOf(ChunkWriter); + }); + + it('closes on abort', () => { + const htmlStream = new HtmlStream(document.body); + htmlStream.abort(); + expect(close).toHaveBeenCalled(); + }); +}); diff --git a/spec/frontend/streaming/rate_limit_stream_requests_spec.js b/spec/frontend/streaming/rate_limit_stream_requests_spec.js new file mode 100644 index 0000000000000..02e3cf93014ac --- /dev/null +++ b/spec/frontend/streaming/rate_limit_stream_requests_spec.js @@ -0,0 +1,155 @@ +import waitForPromises from 'helpers/wait_for_promises'; +import { rateLimitStreamRequests } from '~/streaming/rate_limit_stream_requests'; + +describe('rateLimitStreamRequests', () => { + const encoder = new TextEncoder('utf-8'); + const createStreamResponse = (content = 'foo') => + new ReadableStream({ + pull(controller) { + controller.enqueue(encoder.encode(content)); + controller.close(); + }, + }); + + const createFactory = (content) => { + return jest.fn(() => { + return Promise.resolve(createStreamResponse(content)); + }); + }; + + it('does nothing for zero total requests', () => { + const factory = jest.fn(); + const requests = rateLimitStreamRequests({ + factory, + total: 0, + }); + expect(factory).toHaveBeenCalledTimes(0); + expect(requests.length).toBe(0); + }); + + it('does not exceed total requests', () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + immediateCount: 100, + maxConcurrentRequests: 100, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(2); + }); + + it('creates immediate requests', () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(2); + 
}); + + it('returns correct values', async () => { + const fixture = 'foobar'; + const factory = createFactory(fixture); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 2, + }); + + const decoder = new TextDecoder('utf-8'); + let result = ''; + for await (const stream of requests) { + await stream.pipeTo( + new WritableStream({ + // eslint-disable-next-line no-loop-func + write(content) { + result += decoder.decode(content); + }, + }), + ); + } + + expect(result).toBe(fixture + fixture); + }); + + it('delays rate limited requests', async () => { + const factory = createFactory(); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 2, + total: 3, + }); + expect(factory).toHaveBeenCalledTimes(2); + expect(requests.length).toBe(3); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(3); + }); + + it('runs next request after previous has been fulfilled', async () => { + let res; + const factory = jest + .fn() + .mockImplementationOnce( + () => + new Promise((resolve) => { + res = resolve; + }), + ) + .mockImplementationOnce(() => Promise.resolve(createStreamResponse())); + const requests = rateLimitStreamRequests({ + factory, + maxConcurrentRequests: 1, + total: 2, + }); + expect(factory).toHaveBeenCalledTimes(1); + expect(requests.length).toBe(2); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(1); + + res(createStreamResponse()); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(2); + }); + + it('uses timer to schedule next request', async () => { + let res; + const factory = jest + .fn() + .mockImplementationOnce( + () => + new Promise((resolve) => { + res = resolve; + }), + ) + .mockImplementationOnce(() => Promise.resolve(createStreamResponse())); + const requests = rateLimitStreamRequests({ + factory, + immediateCount: 1, + maxConcurrentRequests: 2, + total: 2, + timeout: 9999, + }); + expect(factory).toHaveBeenCalledTimes(1); + 
expect(requests.length).toBe(2); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(1); + + jest.runAllTimers(); + + await waitForPromises(); + + expect(factory).toHaveBeenCalledTimes(2); + res(createStreamResponse()); + }); +}); diff --git a/spec/frontend/streaming/render_balancer_spec.js b/spec/frontend/streaming/render_balancer_spec.js new file mode 100644 index 0000000000000..dae0c98d678d8 --- /dev/null +++ b/spec/frontend/streaming/render_balancer_spec.js @@ -0,0 +1,69 @@ +import { RenderBalancer } from '~/streaming/render_balancer'; + +const HIGH_FRAME_TIME = 100; +const LOW_FRAME_TIME = 10; + +describe('renderBalancer', () => { + let frameTime = 0; + let frameTimeDelta = 0; + let decrease; + let increase; + + const createBalancer = () => { + decrease = jest.fn(); + increase = jest.fn(); + return new RenderBalancer({ + highFrameTime: HIGH_FRAME_TIME, + lowFrameTime: LOW_FRAME_TIME, + increase, + decrease, + }); + }; + + const renderTimes = (times) => { + const balancer = createBalancer(); + return new Promise((resolve) => { + let counter = 0; + balancer.render(() => { + if (counter === times) { + resolve(counter); + return false; + } + counter += 1; + return true; + }); + }); + }; + + beforeEach(() => { + jest.spyOn(window, 'requestAnimationFrame').mockImplementation((cb) => { + frameTime += frameTimeDelta; + cb(frameTime); + }); + }); + + afterEach(() => { + window.requestAnimationFrame.mockRestore(); + frameTime = 0; + frameTimeDelta = 0; + }); + + it('renders in a loop', async () => { + const count = await renderTimes(5); + expect(count).toBe(5); + }); + + it('calls decrease', async () => { + frameTimeDelta = 200; + await renderTimes(5); + expect(decrease).toHaveBeenCalled(); + expect(increase).not.toHaveBeenCalled(); + }); + + it('calls increase', async () => { + frameTimeDelta = 1; + await renderTimes(5); + expect(increase).toHaveBeenCalled(); + expect(decrease).not.toHaveBeenCalled(); + }); +}); diff --git 
a/spec/frontend/streaming/render_html_streams_spec.js b/spec/frontend/streaming/render_html_streams_spec.js new file mode 100644 index 0000000000000..55cef0ea4693d --- /dev/null +++ b/spec/frontend/streaming/render_html_streams_spec.js @@ -0,0 +1,96 @@ +import { ReadableStream } from 'node:stream/web'; +import { renderHtmlStreams } from '~/streaming/render_html_streams'; +import { HtmlStream } from '~/streaming/html_stream'; +import waitForPromises from 'helpers/wait_for_promises'; + +jest.mock('~/streaming/html_stream'); +jest.mock('~/streaming/constants', () => { + return { + HIGH_FRAME_TIME: 0, + LOW_FRAME_TIME: 0, + MAX_CHUNK_SIZE: 1, + MIN_CHUNK_SIZE: 1, + }; +}); + +const firstStreamContent = 'foobar'; +const secondStreamContent = 'bazqux'; + +describe('renderHtmlStreams', () => { + let htmlWriter; + const encoder = new TextEncoder(); + const createSingleChunkStream = (chunk) => { + const encoded = encoder.encode(chunk); + const stream = new ReadableStream({ + pull(controller) { + controller.enqueue(encoded); + controller.close(); + }, + }); + return [stream, encoded]; + }; + + beforeEach(() => { + htmlWriter = { + write: jest.fn(), + close: jest.fn(), + abort: jest.fn(), + }; + jest.spyOn(HtmlStream.prototype, 'withChunkWriter').mockReturnValue(htmlWriter); + }); + + it('renders a single stream', async () => { + const [stream, encoded] = createSingleChunkStream(firstStreamContent); + + await renderHtmlStreams([Promise.resolve(stream)], document.body); + + expect(htmlWriter.write).toHaveBeenCalledWith(encoded); + expect(htmlWriter.close).toHaveBeenCalledTimes(1); + }); + + it('renders stream sequence', async () => { + const [stream1, encoded1] = createSingleChunkStream(firstStreamContent); + const [stream2, encoded2] = createSingleChunkStream(secondStreamContent); + + await renderHtmlStreams([Promise.resolve(stream1), Promise.resolve(stream2)], document.body); + + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1], [encoded2]]); + 
expect(htmlWriter.close).toHaveBeenCalledTimes(1); + }); + + it("doesn't wait for the whole sequence to resolve before streaming", async () => { + const [stream1, encoded1] = createSingleChunkStream(firstStreamContent); + const [stream2, encoded2] = createSingleChunkStream(secondStreamContent); + + let res; + const delayedStream = new Promise((resolve) => { + res = resolve; + }); + + renderHtmlStreams([Promise.resolve(stream1), delayedStream], document.body); + + await waitForPromises(); + + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1]]); + expect(htmlWriter.close).toHaveBeenCalledTimes(0); + + res(stream2); + await waitForPromises(); + + expect(htmlWriter.write.mock.calls).toMatchObject([[encoded1], [encoded2]]); + expect(htmlWriter.close).toHaveBeenCalledTimes(1); + }); + + it('closes HtmlStream on error', async () => { + const [stream1] = createSingleChunkStream(firstStreamContent); + const error = new Error(); + + try { + await renderHtmlStreams([Promise.resolve(stream1), Promise.reject(error)], document.body); + } catch (err) { + expect(err).toBe(error); + } + + expect(htmlWriter.abort).toHaveBeenCalledTimes(1); + }); +}); diff --git a/yarn.lock b/yarn.lock index 80d9713aa2f43..9279d49611fbd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1832,6 +1832,11 @@ resolved "https://registry.yarnpkg.com/@linaria/core/-/core-3.0.0-beta.13.tgz#049c5be5faa67e341e413a0f6b641d5d78d91056" integrity sha512-3zEi5plBCOsEzUneRVuQb+2SAx3qaC1dj0FfFAI6zIJQoDWu0dlSwKijMRack7oO9tUWrchfj3OkKQAd1LBdVg== +"@mattiasbuelens/web-streams-adapter@^0.1.0": + version "0.1.0" + resolved "https://registry.yarnpkg.com/@mattiasbuelens/web-streams-adapter/-/web-streams-adapter-0.1.0.tgz#607b5a25682f4ae2741da7ba6df39302505336b3" + integrity sha512-oV4PyZfwJNtmFWhvlJLqYIX1Nn22ML8FZpS16ZUKv0hg7414xV1fjsGqxQzLT2dyK92TKxsJSwMOd7VNHAtPmA== + "@miragejs/pretender-node-polyfill@^0.1.0": version "0.1.2" resolved 
"https://registry.yarnpkg.com/@miragejs/pretender-node-polyfill/-/pretender-node-polyfill-0.1.2.tgz#d26b6b7483fb70cd62189d05c95d2f67153e43f2" -- GitLab