Commit

Revert "fix: adding emergency garbage collection for chromium-based b…
Browse files Browse the repository at this point in the history
…rowsers (#25521)"

This reverts commit 83b2cc1.
mschile committed Jan 23, 2023
1 parent 90187ea commit 8347baa
Showing 5 changed files with 31 additions and 138 deletions.
20 changes: 10 additions & 10 deletions .circleci/workflows.yml
@@ -1357,7 +1357,7 @@ jobs:
path: /tmp/cypress
- store-npm-logs

driver-integration-memory-tests:
memory-driver-tests:
<<: *defaults
parameters:
<<: *defaultsParameters
@@ -2431,7 +2431,7 @@ linux-x64-workflow: &linux-x64-workflow
context: test-runner:cypress-record-key
requires:
- build
- driver-integration-memory-tests:
- memory-driver-tests:
requires:
- build
- run-frontend-shared-component-tests-chrome:
@@ -2675,8 +2675,8 @@ linux-arm64-workflow: &linux-arm64-workflow
resource_class: arm.medium
requires:
- linux-arm64-build
- driver-integration-memory-tests:
name: linux-arm64-driver-integration-memory-tests
- memory-driver-tests:
name: linux-arm64-memory-driver-tests
executor: linux-arm64
resource_class: arm.medium
requires:
@@ -2721,8 +2721,8 @@ darwin-x64-workflow: &darwin-x64-workflow
resource_class: macos.x86.medium.gen2
requires:
- darwin-x64-build
- driver-integration-memory-tests:
name: darwin-x64-driver-integration-memory-tests
- memory-driver-tests:
name: darwin-x64-memory-driver-tests
executor: mac
resource_class: macos.x86.medium.gen2
requires:
@@ -2760,8 +2760,8 @@ darwin-arm64-workflow: &darwin-arm64-workflow
resource_class: cypress-io/latest_m1
requires:
- darwin-arm64-build
- driver-integration-memory-tests:
name: darwin-arm64-driver-integration-memory-tests
- memory-driver-tests:
name: darwin-arm64-memory-driver-tests
executor: darwin-arm64
resource_class: cypress-io/latest_m1
requires:
@@ -2828,8 +2828,8 @@ windows-workflow: &windows-workflow
resource_class: windows.large
requires:
- windows-build
- driver-integration-memory-tests:
name: windows-driver-integration-memory-tests
- memory-driver-tests:
name: windows-memory-driver-tests
executor: windows
resource_class: windows.large
requires:
7 changes: 0 additions & 7 deletions packages/driver/cypress/e2e/memory/memory_large_test.cy.js

This file was deleted.

41 changes: 12 additions & 29 deletions packages/server/lib/browsers/memory/index.ts
@@ -16,7 +16,6 @@ const debug = debugModule('cypress:server:browsers:memory')
const debugVerbose = debugModule('cypress-verbose:server:browsers:memory')

const MEMORY_THRESHOLD_PERCENTAGE = Number(process.env.CYPRESS_INTERNAL_MEMORY_THRESHOLD_PERCENTAGE) || 50
const EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE = Number(process.env.CYPRESS_INTERNAL_EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE) || 90
const MEMORY_PROFILER_INTERVAL = Number(process.env.CYPRESS_INTERNAL_MEMORY_PROFILER_INTERVAL) || 1000
const MEMORY_FOLDER = process.env.CYPRESS_INTERNAL_MEMORY_FOLDER_PATH || path.join('cypress', 'logs', 'memory')
const SAVE_MEMORY_STATS = ['1', 'true'].includes(process.env.CYPRESS_INTERNAL_MEMORY_SAVE_STATS?.toLowerCase() as string)
@@ -45,23 +44,17 @@ export type MemoryHandler = {
/**
* Algorithm:
*
* When the spec run starts:
* When the test runs starts:
* 1. set total mem limit for the container/host by reading off cgroup memory limits (if available) otherwise use os.totalmem()
* 2. set js heap size limit by reading off the browser
* 3. turn on memory profiler
*
* On a defined interval (e.g. 1s):
* 1. set current mem available for the container/host by reading off cgroup memory usage (if available) otherwise use si.mem().available
* 2. set current renderer mem usage
* 3. set max avail render mem to minimum of v8 heap size limit and total available mem (current available mem + current renderer mem usage)
* 4. calc % of memory used, current renderer mem usage / max avail render mem
* 5. if % of memory used exceeds the emergency memory threshold percentage (e.g. 90%) do a GC
*
* Before each test:
* 1. if any interval exceeded the defined memory threshold (e.g. 50%), do a GC
*
* After the spec run ends:
* 1. turn off memory profiler
* 1. if that exceeds the defined memory threshold percentage (e.g. 50%) do a GC
*/

/**
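As a rough illustration of the per-interval check the algorithm comment above describes (this sketch is not part of the diff; the standalone function and constant are assumptions, and all values are in bytes):

const MEMORY_THRESHOLD_PERCENTAGE = 50

function shouldCollectGarbage (
  jsHeapSizeLimit: number,        // read once from the browser
  currentAvailableMemory: number, // cgroup usage if available, otherwise si.mem().available
  rendererProcessMemRss: number,  // current renderer memory usage
): boolean {
  // the renderer can never use more than the v8 heap limit, nor more than
  // what is still free plus what it already holds
  const maxAvailableRendererMemory = Math.min(jsHeapSizeLimit, currentAvailableMemory + rendererProcessMemRss)
  const rendererUsagePercentage = (rendererProcessMemRss / maxAvailableRendererMemory) * 100

  return rendererUsagePercentage >= MEMORY_THRESHOLD_PERCENTAGE
}

In the reverted change, this same check was additionally compared against the 90% emergency threshold to trigger an immediate garbage collection instead of waiting for the next test.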
@@ -210,7 +203,7 @@ export const getAvailableMemory: () => Promise<number> = measure(() => {
/**
* Calculates the memory stats used to determine if garbage collection should be run before the next test starts.
*/
export const calculateMemoryStats: (automation: Automation) => Promise<void> = measure(async (automation: Automation) => {
export const calculateMemoryStats: () => Promise<void> = measure(async () => {
// retrieve the available memory and the renderer process memory usage
const [currentAvailableMemory, rendererProcessMemRss] = await Promise.all([
getAvailableMemory(),
@@ -228,20 +221,12 @@ export const calculateMemoryStats: (automation: Automation) => Promise<void> = m
const maxAvailableRendererMemory = Math.min(jsHeapSizeLimit, currentAvailableMemory + rendererProcessMemRss)

const rendererUsagePercentage = (rendererProcessMemRss / maxAvailableRendererMemory) * 100
// if the renderer's memory is above the MEMORY_THRESHOLD_PERCENTAGE, we should collect garbage on the next test
// if we're using more than MEMORY_THRESHOLD_PERCENTAGE of the available memory,
const shouldCollectGarbage = rendererUsagePercentage >= MEMORY_THRESHOLD_PERCENTAGE && !SKIP_GC

// if we should collect garbage, set the flag to true so we can collect garbage on the next test
collectGarbageOnNextTest = collectGarbageOnNextTest || shouldCollectGarbage

// if the renderer's memory is above the EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE, we should perform an emergency garbage collection now
const shouldEmergencyCollectGarbage = rendererUsagePercentage >= EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE && !SKIP_GC

if (shouldEmergencyCollectGarbage) {
debug('emergency garbage collection triggered')
await checkMemoryPressure(automation, shouldEmergencyCollectGarbage)
}

// set all the memory stats on the stats log
statsLog.jsHeapSizeLimit = jsHeapSizeLimit
statsLog.totalMemoryLimit = totalMemoryLimit
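Worked example with hypothetical numbers: if jsHeapSizeLimit is 4 GiB, currentAvailableMemory is 1.5 GiB, and the renderer RSS is 1 GiB, then maxAvailableRendererMemory = min(4, 1.5 + 1) = 2.5 GiB and rendererUsagePercentage = 1 / 2.5 = 40%, which is below the 50% threshold, so no garbage collection is flagged for the next test (and, in the reverted code, well below the 90% emergency threshold).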
@@ -251,8 +236,6 @@ export const calculateMemoryStats: (automation: Automation) => Promise<void> = m
statsLog.currentAvailableMemory = currentAvailableMemory
statsLog.maxAvailableRendererMemory = maxAvailableRendererMemory
statsLog.shouldCollectGarbage = shouldCollectGarbage
statsLog.emergencyGarbageCollected = shouldEmergencyCollectGarbage
statsLog.emergencyRendererMemoryThreshold = maxAvailableRendererMemory * (EMERGENCY_MEMORY_THRESHOLD_PERCENTAGE / 100)
statsLog.timestamp = Date.now()
}, { name: 'calculateMemoryStats', save: true })

@@ -281,8 +264,8 @@ const checkMemoryPressureAndLog = async ({ automation, test }: { automation: Aut
* Collects the browser's garbage if it previously exceeded the threshold when it was measured.
* @param automation the automation client used to collect garbage
*/
const checkMemoryPressure: (automation: Automation, emergencyCollectGarbage?: boolean) => Promise<void> = measure(async (automation: Automation, emergencyCollectGarbage: boolean = false) => {
if (collectGarbageOnNextTest || emergencyCollectGarbage) {
const checkMemoryPressure: (automation: Automation) => Promise<void> = measure(async (automation: Automation) => {
if (collectGarbageOnNextTest) {
debug('forcing garbage collection')
try {
await automation.request('collect:garbage', null, null)
@@ -309,24 +292,24 @@ const addCumulativeStats = (stats: { [key: string]: any }) => {
/**
* Gathers the memory stats and schedules the next check.
*/
const gatherMemoryStats = async (automation: Automation) => {
const gatherMemoryStats = async () => {
try {
await calculateMemoryStats(automation)
await calculateMemoryStats()
addCumulativeStats(statsLog)
statsLog = {}
} catch (err) {
debug('error gathering memory stats: %o', err)
}
scheduleMemoryCheck(automation)
scheduleMemoryCheck()
}

/**
* Schedules the next gathering of memory stats based on the MEMORY_PROFILER_INTERVAL.
*/
const scheduleMemoryCheck = (automation: Automation) => {
const scheduleMemoryCheck = () => {
if (started) {
// not setinterval, since gatherMemoryStats is asynchronous
timer = setTimeout(() => gatherMemoryStats(automation), MEMORY_PROFILER_INTERVAL)
timer = setTimeout(gatherMemoryStats, MEMORY_PROFILER_INTERVAL)
}
}
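The "not setinterval" comment above is terse; the point is that the next sample is only scheduled after the previous asynchronous one finishes, so slow measurements cannot overlap. A minimal sketch of that pattern, with assumed names rather than the module's actual exports:

const MEMORY_PROFILER_INTERVAL = 1000

let started = true
let timer: ReturnType<typeof setTimeout> | undefined

async function sampleMemory (): Promise<void> {
  // placeholder for the real measurement work (gatherMemoryStats in the diff)
}

async function tick (): Promise<void> {
  try {
    await sampleMemory()
  } finally {
    // re-arm only after the async work has settled, so ticks never pile up
    if (started) {
      timer = setTimeout(tick, MEMORY_PROFILER_INTERVAL)
    }
  }
}

Stopping the loop then amounts to setting started = false and clearing timer.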

@@ -365,7 +348,7 @@ async function startProfiling (automation: Automation, spec: { fileName: string
totalMemoryLimit = await handler.getTotalMemoryLimit(),
])

await gatherMemoryStats(automation)
await gatherMemoryStats()
} catch (err) {
debug('error starting memory profiler: %o', err)
}