From 5690683a58a281421c5d3c4a1644e45ce6348d8e Mon Sep 17 00:00:00 2001 From: Sag Date: Mon, 27 Apr 2026 12:22:47 +0200 Subject: [PATCH 01/10] Updated gift status to "consumed" when member upgrades to paid (#27539) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit closes https://linear.app/ghost/issue/BER-3477 Marks a member's redeemed gift as `consumed` (with `consumed_at = now`) when they upgrade to a paid subscription, instead of leaving the gift in `redeemed` status until its natural expiry. ## What changed - **Event subscription**: `GiftServiceWrapper` subscribes to `SubscriptionActivatedEvent` and consumes the redeemer's active gift on activation. Failures are caught and logged so an event-handler error never bubbles back into the subscription-activation flow. - **Shared `consume()` method**: extracted from `processConsumed` so both the scheduled batch job and the new event-driven path go through the same logic — re-fetch under `forUpdate`, no-op if not `redeemed` (idempotent), optional `transacting` for composition with an outer transaction. 
--- .../services/gifts/gift-service-wrapper.js | 17 ++++ .../server/services/gifts/gift-service.ts | 40 ++++++--- .../members/gift-subscriptions.test.js | 21 ++++- .../services/gifts/gift-service.test.ts | 83 +++++++++++++++++++ 4 files changed, 148 insertions(+), 13 deletions(-) diff --git a/ghost/core/core/server/services/gifts/gift-service-wrapper.js b/ghost/core/core/server/services/gifts/gift-service-wrapper.js index 9d80097b186..c554a027a65 100644 --- a/ghost/core/core/server/services/gifts/gift-service-wrapper.js +++ b/ghost/core/core/server/services/gifts/gift-service-wrapper.js @@ -16,6 +16,9 @@ class GiftServiceWrapper { const tiersService = require('../tiers'); const staffService = require('../staff'); const labsService = require('../../../shared/labs'); + const DomainEvents = require('@tryghost/domain-events'); + const logging = require('@tryghost/logging'); + const {SubscriptionActivatedEvent} = require('../../../shared/events'); const {GhostMailer} = require('../mail'); const settingsCache = require('../../../shared/settings-cache'); @@ -53,6 +56,20 @@ class GiftServiceWrapper { tiersService, labsService: labsService }); + + DomainEvents.subscribe(SubscriptionActivatedEvent, async (event) => { + try { + const gift = await this.service.getActiveByMember(event.data.memberId); + + if (!gift) { + return; + } + + await this.service.consume(gift.token); + } catch (err) { + logging.error(err, 'Failed to consume gift on paid subscription activation'); + } + }); } } diff --git a/ghost/core/core/server/services/gifts/gift-service.ts b/ghost/core/core/server/services/gifts/gift-service.ts index e3917d08065..035f0cf1d31 100644 --- a/ghost/core/core/server/services/gifts/gift-service.ts +++ b/ghost/core/core/server/services/gifts/gift-service.ts @@ -383,6 +383,31 @@ export class GiftService { return true; } + async consume(token: string, options: {transacting?: unknown} = {}): Promise { + const run = async (transacting: unknown) => { + // Fetch with a row lock to 
prevent race conditions under concurrency + const gift = await this.deps.giftRepository.getByToken(token, {transacting, forUpdate: true}); + + if (!gift || gift.status !== 'redeemed') { + return null; + } + + const consumed = gift.consume(); + + if (!consumed) { + return null; + } + + await this.deps.giftRepository.update(consumed, {transacting}); + + return consumed; + }; + + return options.transacting + ? await run(options.transacting) + : await this.deps.giftRepository.transaction(run); + } + async processConsumed(): Promise<{consumedCount: number; updatedMemberCount: number}> { const toConsume = await this.deps.giftRepository.findPendingConsumption(); @@ -395,32 +420,23 @@ export class GiftService { for (const gift of toConsume) { await this.deps.giftRepository.transaction(async (transacting) => { - // Re-fetch with a row lock to prevent races with concurrent refunds - const locked = await this.deps.giftRepository.getByToken(gift.token, {transacting, forUpdate: true}); - - if (locked?.status !== 'redeemed') { - return; - } - - const consumed = locked.consume(); + const consumed = await this.consume(gift.token, {transacting}); if (!consumed) { return; } - const member = await this.deps.memberRepository.get({id: locked.redeemerMemberId}, {transacting, forUpdate: true}); + const member = await this.deps.memberRepository.get({id: consumed.redeemerMemberId}, {transacting, forUpdate: true}); if (member && member.get('status') === 'gift') { await this.deps.memberRepository.update({ products: [], status: 'free' - }, {id: locked.redeemerMemberId, transacting}); + }, {id: consumed.redeemerMemberId, transacting}); updatedMemberCount += 1; } - await this.deps.giftRepository.update(consumed, {transacting}); - consumedCount += 1; }); } diff --git a/ghost/core/test/e2e-api/members/gift-subscriptions.test.js b/ghost/core/test/e2e-api/members/gift-subscriptions.test.js index d9889416684..382ab55c146 100644 --- a/ghost/core/test/e2e-api/members/gift-subscriptions.test.js +++ 
b/ghost/core/test/e2e-api/members/gift-subscriptions.test.js @@ -1041,7 +1041,7 @@ describe('Gift Subscriptions', function () { return {member, identityToken, gift, paidTier, consumesAt}; } - it('applies remaining gift days as trial and continues on the same tier/cadence as gift', async function () { + it('applies remaining gift days as trial and continues to checkout on the same tier/cadence', async function () { const {identityToken, gift, paidTier} = await setupGiftMember({cadence: 'month', consumesInDays: 30}); await membersAgent.post('/api/create-stripe-checkout-session/') @@ -1075,6 +1075,25 @@ describe('Gift Subscriptions', function () { assert.equal(giftRecord.get('tier_id'), paidTier.id); }); + it('marks the gift as consumed once the paid subscription activates', async function () { + const {member, gift} = await setupGiftMember({cadence: 'month', consumesInDays: 30}); + + // Starting point: gift redeemed, not yet consumed + assert.equal(gift.get('status'), 'redeemed'); + assert.equal(gift.get('consumed_at'), null); + + const customer = stripeMocker.createCustomer({email: member.get('email')}); + const price = await stripeMocker.getPriceForTier('default-product', 'month'); + + await stripeMocker.createSubscription({customer, price}); + await DomainEvents.allSettled(); + + // Gift should now be consumed + const giftAfterActivation = await models.Gift.findOne({token: gift.get('token')}, {require: true}); + assert.equal(giftAfterActivation.get('status'), 'consumed'); + assert.ok(giftAfterActivation.get('consumed_at'), 'Gift should have consumed_at set'); + }); + it('Returns 401 for unauthenticated members', async function () { await membersAgent.post('/api/create-stripe-checkout-session/') .body({ diff --git a/ghost/core/test/unit/server/services/gifts/gift-service.test.ts b/ghost/core/test/unit/server/services/gifts/gift-service.test.ts index f8b6e41e954..fd7a2a49f64 100644 --- a/ghost/core/test/unit/server/services/gifts/gift-service.test.ts +++ 
b/ghost/core/test/unit/server/services/gifts/gift-service.test.ts @@ -629,6 +629,89 @@ describe('GiftService', function () { }); }); + describe('consume', function () { + it('marks a redeemed gift as consumed and returns it', async function () { + const gift = buildRedeemedGift(); + + giftRepository.getByToken.resolves(gift); + + const service = createService(); + const result = await service.consume('gift-token'); + + assert.ok(result); + assert.equal(result.status, 'consumed'); + assert.notEqual(result.consumedAt, null); + + sinon.assert.calledOnce(giftRepository.transaction); + sinon.assert.calledOnceWithExactly(giftRepository.getByToken, 'gift-token', {transacting: 'trx', forUpdate: true}); + + sinon.assert.calledOnce(giftRepository.update); + + const [saved, options] = giftRepository.update.getCall(0).args; + + assert.equal(saved.status, 'consumed'); + assert.notEqual(saved.consumedAt, null); + assert.deepEqual(options, {transacting: 'trx'}); + }); + + it('returns null when the token does not exist', async function () { + giftRepository.getByToken.resolves(null); + + const service = createService(); + const result = await service.consume('missing-token'); + + assert.equal(result, null); + sinon.assert.notCalled(giftRepository.update); + }); + + it('returns null when the gift is no longer in redeemed status', async function () { + giftRepository.getByToken.resolves(buildGift({ + status: 'consumed', + redeemerMemberId: 'member_1', + redeemedAt: new Date('2025-04-01T00:00:00.000Z'), + consumesAt: new Date('2026-04-01T00:00:00.000Z'), + consumedAt: new Date('2026-04-01T00:00:00.000Z') + })); + + const service = createService(); + const result = await service.consume('gift-token'); + + assert.equal(result, null); + sinon.assert.notCalled(giftRepository.update); + }); + + it('uses an external transaction when provided instead of creating its own', async function () { + const gift = buildRedeemedGift(); + + giftRepository.getByToken.resolves(gift); + + const service 
= createService(); + const externalTrx = 'external-trx'; + const result = await service.consume('gift-token', {transacting: externalTrx}); + + assert.ok(result); + assert.equal(result.status, 'consumed'); + + sinon.assert.notCalled(giftRepository.transaction); + sinon.assert.calledOnceWithExactly(giftRepository.getByToken, 'gift-token', {transacting: externalTrx, forUpdate: true}); + sinon.assert.calledOnceWithExactly(giftRepository.update, sinon.match.any, {transacting: externalTrx}); + }); + + it('propagates errors from the repository update', async function () { + const gift = buildRedeemedGift(); + + giftRepository.getByToken.resolves(gift); + giftRepository.update.rejects(new Error('DB error')); + + const service = createService(); + + await assert.rejects( + () => service.consume('gift-token'), + {message: 'DB error'} + ); + }); + }); + describe('processExpired', function () { it('returns zero count when no gifts are pending expiration', async function () { giftRepository.findPendingExpiration.resolves([]); From a4a79e684ac8edd1f929b7b5a34ddd402fc6e426 Mon Sep 17 00:00:00 2001 From: Fabien O'Carroll Date: Mon, 27 Apr 2026 13:46:06 +0200 Subject: [PATCH 02/10] Added cache hit/miss/error/reset metrics (#27530) ref https://github.com/TryGhost/Ghost/pull/27530 Emits structured metrics from the Redis cache adapter for every operation: hit, miss, timeout, error, reset, and background refresh (triggered/succeeded/failed). Each metric carries an optional 'feature' dimension (via the new featureName config option) for dashboard filtering. Timed operations include a duration value. The timeout path uses a single-settlement guard to prevent double-counting when a timed-out lookup later rejects. set() accepts a throwOnError option so background refresh only emits 'succeeded' after a write that actually landed. 
--- .../adapters/lib/redis/AdapterCacheRedis.js | 65 ++++- .../lib/redis/adapter-cache-redis.test.js | 259 ++++++++++++++++++ 2 files changed, 320 insertions(+), 4 deletions(-) diff --git a/ghost/core/core/server/adapters/lib/redis/AdapterCacheRedis.js b/ghost/core/core/server/adapters/lib/redis/AdapterCacheRedis.js index be7809e87a6..c67e07388e0 100644 --- a/ghost/core/core/server/adapters/lib/redis/AdapterCacheRedis.js +++ b/ghost/core/core/server/adapters/lib/redis/AdapterCacheRedis.js @@ -2,6 +2,7 @@ const crypto = require('crypto'); const BaseCacheAdapter = require('@tryghost/adapter-base-cache'); const errors = require('@tryghost/errors'); const logging = require('@tryghost/logging'); +const metrics = require('@tryghost/metrics'); const debug = require('@tryghost/debug')('redis-cache'); const cacheManager = require('cache-manager'); const redisStoreFactory = require('./redis-store-factory'); @@ -22,6 +23,7 @@ class AdapterCacheRedis extends BaseCacheAdapter { * @param {number} [config.getTimeoutMilliseconds] - default timeout for cache get operations in *milliseconds* * @param {number} [config.refreshAheadFactor] - 0-1 number to use to determine how old (as a percentage of ttl) an entry should be before refreshing it * @param {string} [config.keyPrefix] - prefix to use when building a unique cache key, e.g.: 'some_id:image-sizes:' + * @param {string} [config.featureName] - name of the cache feature (e.g. 
'postsPublic') used for dashboard filtering * @param {boolean} [config.reuseConnection] - specifies if the redis store/connection should be reused within the process */ constructor(config) { @@ -67,10 +69,24 @@ class AdapterCacheRedis extends BaseCacheAdapter { this.currentlyExecutingBackgroundRefreshes = new Set(); this.currentlyExecutingReads = new Map(); this._keyPrefix = config.keyPrefix || ''; + this._featureName = config.featureName; this._prefixHashInitInFlight = null; this.redisClient.on('error', this.handleRedisError); } + /** + * Underscore-private to survive api-framework's `_.cloneDeep` on the + * controller (and this adapter). See the "survives deep cloning" + * suite — `#`-private methods break on clones. + */ + _metric(name, extra = {}) { + const value = {...extra}; + if (this._featureName !== undefined) { + value.feature = this._featureName; + } + metrics.metric(name, value); + } + /** * api-framework's pipeline _.cloneDeep's controllers (and the cache * adapter alongside them). Caching the redis client as `this.redisClient` @@ -192,16 +208,35 @@ class AdapterCacheRedis extends BaseCacheAdapter { } return new Promise((resolve) => { + // `lookup` keeps running after the timer fires, so without a + // single-settlement guard a slow failing redis read would emit + // both `cache-timeout` and `cache-error` for the same request. 
+ let settled = false; const timer = setTimeout(() => { + if (settled) { + return; + } + settled = true; debug('get', key, 'timeout'); + this._metric('cache-timeout'); resolve({internalKey: null, result: null}); }, this.getTimeoutMilliseconds); lookup.then((value) => { + if (settled) { + return; + } + settled = true; clearTimeout(timer); resolve(value); - }, () => { - // redis failure during lookup - treat as MISS, same as the timeout path + }, (err) => { + if (settled) { + return; + } + settled = true; clearTimeout(timer); + // redis failure during lookup - treat as MISS, same as the timeout path + this._metric('cache-error', {operation: 'get'}); + logging.error(err); resolve({internalKey: null, result: null}); }); }); @@ -216,6 +251,13 @@ class AdapterCacheRedis extends BaseCacheAdapter { try { const {internalKey, result} = await this._lookupWithTimeout(key); debug(`get ${key}: Cache ${result ? 'HIT' : 'MISS'}`); + if (result) { + this._metric('cache-hit'); + } else if (internalKey !== null) { + // A real miss; timeouts and lookup errors (internalKey === null) + // already emitted their own metric in `_lookupWithTimeout`. 
+ this._metric('cache-miss'); + } if (!fetchData) { return result; } @@ -228,11 +270,15 @@ class AdapterCacheRedis extends BaseCacheAdapter { if (!isRefreshing && shouldRefresh) { debug(`Doing background refresh for ${key}`); this.currentlyExecutingBackgroundRefreshes.add(internalKey); + this._metric('cache-background-refresh-triggered'); + const refreshStart = performance.now(); fetchData().then(async (data) => { - await this.set(key, data); // We don't use `internalKey` here because `set` handles it + await this.set(key, data, {throwOnError: true}); // We don't use `internalKey` here because `set` handles it this.currentlyExecutingBackgroundRefreshes.delete(internalKey); + this._metric('cache-background-refresh-succeeded', {value: performance.now() - refreshStart}); }).catch((error) => { this.currentlyExecutingBackgroundRefreshes.delete(internalKey); + this._metric('cache-background-refresh-failed'); logging.error({ message: 'There was an error refreshing cache data in the background', error: error @@ -256,6 +302,7 @@ class AdapterCacheRedis extends BaseCacheAdapter { debug('set', internalKey); await this.cache.set(internalKey, data); } catch (err) { + this._metric('cache-error', {operation: 'set'}); logging.error(err); } }).catch(() => { @@ -267,6 +314,7 @@ class AdapterCacheRedis extends BaseCacheAdapter { return resultPromise; } } catch (err) { + this._metric('cache-error', {operation: 'get'}); logging.error(err); } } @@ -275,14 +323,20 @@ class AdapterCacheRedis extends BaseCacheAdapter { * * @param {string} key * @param {*} value + * @param {Object} [options] + * @param {boolean} [options.throwOnError] - if true, rethrow write errors after logging. Used by the background refresh path so success is only emitted after a write that actually succeeded. 
*/ - async set(key, value) { + async set(key, value, {throwOnError = false} = {}) { try { const internalKey = await this._buildKey(key); debug('set', internalKey); return await this.cache.set(internalKey, value); } catch (err) { + this._metric('cache-error', {operation: 'set'}); logging.error(err); + if (throwOnError) { + throw err; + } } } @@ -295,9 +349,12 @@ class AdapterCacheRedis extends BaseCacheAdapter { async reset() { debug('reset'); + const t0 = performance.now(); try { await this.cyclePrefixHash(); + this._metric('cache-reset', {value: performance.now() - t0}); } catch (err) { + this._metric('cache-error', {operation: 'reset'}); logging.error(err); } } diff --git a/ghost/core/test/unit/server/adapters/lib/redis/adapter-cache-redis.test.js b/ghost/core/test/unit/server/adapters/lib/redis/adapter-cache-redis.test.js index 1e9b177da61..43bbea78d12 100644 --- a/ghost/core/test/unit/server/adapters/lib/redis/adapter-cache-redis.test.js +++ b/ghost/core/test/unit/server/adapters/lib/redis/adapter-cache-redis.test.js @@ -3,6 +3,7 @@ const _ = require('lodash'); const sinon = require('sinon'); const errors = require('@tryghost/errors'); const logging = require('@tryghost/logging'); +const metrics = require('@tryghost/metrics'); const RedisCache = require('../../../../../../core/server/adapters/lib/redis/AdapterCacheRedis'); const PREFIX_HASH = 'mock-prefix-hash'; @@ -33,14 +34,27 @@ function createCacheStub({keyPrefix = ''} = {}) { } describe('Adapter Cache Redis', function () { + let metricStub; + beforeEach(function () { sinon.stub(logging, 'error'); + metricStub = sinon.stub(metrics, 'metric'); }); afterEach(function () { sinon.restore(); }); + function metricCall(name) { + return metricStub.getCalls().find(c => c.args[0] === name); + } + + function flushMicrotasks() { + return new Promise((resolve) => { + setImmediate(resolve); + }); + } + it('can initialize Redis cache instance directly', async function () { const cache = new RedisCache({cache: 
createCacheStub()}); assert.ok(cache); @@ -500,6 +514,251 @@ describe('Adapter Cache Redis', function () { }); }); + describe('metrics', function () { + it('emits cache-hit with feature on a hit', async function () { + const cacheStub = createCacheStub(); + cacheStub.get.resolves('v'); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + await cache.get('k'); + + const call = metricCall('cache-hit'); + assert.ok(call, 'cache-hit was emitted'); + assert.equal(call.args[1].feature, 'postsPublic'); + assert.equal(metricCall('cache-miss'), undefined); + }); + + it('emits cache-miss with feature on a genuine miss', async function () { + const cacheStub = createCacheStub(); + cacheStub.get.resolves(null); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + await cache.get('k'); + + const call = metricCall('cache-miss'); + assert.ok(call, 'cache-miss was emitted'); + assert.equal(call.args[1].feature, 'postsPublic'); + assert.equal(metricCall('cache-hit'), undefined); + }); + + it('omits feature when featureName is not set', async function () { + const cacheStub = createCacheStub({keyPrefix: 'postsPublic:'}); + cacheStub.get.resolves('v'); + const cache = new RedisCache({cache: cacheStub, keyPrefix: 'postsPublic:'}); + + await cache.get('k'); + + assert.equal('feature' in metricCall('cache-hit').args[1], false); + }); + + it('emits cache-timeout (but not cache-miss) when the get exceeds the timeout', async function () { + const cacheStub = createCacheStub(); + cacheStub.get.callsFake(() => new Promise((resolve) => { + setTimeout(() => resolve('v'), 200); + })); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + getTimeoutMilliseconds: 10 + }); + + await cache.get('k'); + + const timeoutCall = metricCall('cache-timeout'); + assert.ok(timeoutCall, 'cache-timeout was emitted'); + assert.equal(timeoutCall.args[1].feature, 'postsPublic'); + assert.equal(metricCall('cache-miss'), 
undefined, 'timeout is not also counted as a miss'); + }); + + it('does not double-emit when a timed-out lookup rejects later', async function () { + // If the timer fires first and the underlying redis read rejects + // afterwards, we should count the request once (as a timeout), + // not both as a timeout and as an error. + let rejectLookup; + const cacheStub = createCacheStub(); + cacheStub.get.callsFake(() => new Promise((_resolve, reject) => { + rejectLookup = reject; + })); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + getTimeoutMilliseconds: 10 + }); + + await cache.get('k'); + rejectLookup(new Error('late failure')); + await new Promise((resolve) => { + setTimeout(resolve, 20); + }); + + assert.ok(metricCall('cache-timeout')); + assert.equal(metricCall('cache-error'), undefined, 'no cache-error emitted after timeout'); + }); + + it('emits cache-error (but not cache-miss) when a redis get rejects in timeout mode', async function () { + const cacheStub = createCacheStub(); + cacheStub.get.rejects(new Error('redis down')); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + getTimeoutMilliseconds: 1000 + }); + + await cache.get('k'); + + const errorCall = metricCall('cache-error'); + assert.ok(errorCall, 'cache-error was emitted'); + assert.equal(errorCall.args[1].operation, 'get'); + assert.equal(errorCall.args[1].feature, 'postsPublic'); + assert.equal(metricCall('cache-miss'), undefined, 'redis error is not also counted as a miss'); + }); + + it('emits cache-error on a set failure', async function () { + const cacheStub = createCacheStub(); + cacheStub.set = sinon.stub().rejects(new Error('write failed')); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + await cache.set('k', 'v'); + + const call = metricCall('cache-error'); + assert.ok(call); + assert.equal(call.args[1].operation, 'set'); + assert.equal(call.args[1].feature, 'postsPublic'); + }); + + 
it('emits cache-reset on successful reset', async function () { + const cacheStub = createCacheStub(); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + await cache.reset(); + + const call = metricCall('cache-reset'); + assert.ok(call); + assert.equal(call.args[1].feature, 'postsPublic'); + assert.equal(typeof call.args[1].value, 'number'); + assert.equal(metricCall('cache-error'), undefined); + }); + + it('emits cache-error on a reset failure', async function () { + const cacheStub = createCacheStub(); + cacheStub.store.getClient().set.rejects(new Error('cycle failed')); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + await cache.reset(); + + const errorCalls = metricStub.getCalls().filter(c => c.args[0] === 'cache-error'); + assert.equal(errorCalls.length, 1); + assert.equal(errorCalls[0].args[1].operation, 'reset'); + assert.equal(metricCall('cache-reset'), undefined); + }); + + it('emits background refresh triggered + succeeded on a successful refresh', async function () { + const KEY = 'bg-success'; + let cachedValue = 'Original'; + const cacheStub = createCacheStub(); + cacheStub.get.callsFake(async (key) => { + if (key === PREFIX_HASH + KEY) { + return cachedValue; + } + }); + cacheStub.set.callsFake(async (key, value) => { + if (key === PREFIX_HASH + KEY) { + cachedValue = value; + } + }); + cacheStub.ttl.resolves(5); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + ttl: 100, + refreshAheadFactor: 0.2 + }); + + const fetchData = sinon.stub().resolves('Fresh'); + await cache.get(KEY, fetchData); + await flushMicrotasks(); + await flushMicrotasks(); + + assert.ok(metricCall('cache-background-refresh-triggered')); + assert.ok(metricCall('cache-background-refresh-succeeded')); + assert.equal(metricCall('cache-background-refresh-triggered').args[1].feature, 'postsPublic'); + assert.equal(typeof metricCall('cache-background-refresh-succeeded').args[1].value, 
'number'); + assert.equal(metricCall('cache-background-refresh-failed'), undefined); + }); + + it('emits cache-background-refresh-failed when the refresh fetch rejects', async function () { + const KEY = 'bg-fetch-fail'; + const cachedValue = 'Original'; + const cacheStub = createCacheStub(); + cacheStub.get.callsFake(async (key) => { + if (key === PREFIX_HASH + KEY) { + return cachedValue; + } + }); + cacheStub.ttl.resolves(5); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + ttl: 100, + refreshAheadFactor: 0.2 + }); + + const fetchData = sinon.stub().rejects(new Error('upstream down')); + await cache.get(KEY, fetchData); + await flushMicrotasks(); + await flushMicrotasks(); + + assert.ok(metricCall('cache-background-refresh-triggered')); + assert.ok(metricCall('cache-background-refresh-failed')); + assert.equal(metricCall('cache-background-refresh-succeeded'), undefined); + }); + + it('emits cache-background-refresh-failed when the cache write fails', async function () { + const KEY = 'bg-write-fail'; + const cachedValue = 'Original'; + const cacheStub = createCacheStub(); + cacheStub.get.callsFake(async (key) => { + if (key === PREFIX_HASH + KEY) { + return cachedValue; + } + }); + cacheStub.set = sinon.stub().rejects(new Error('write failed')); + cacheStub.ttl.resolves(5); + const cache = new RedisCache({ + cache: cacheStub, + featureName: 'postsPublic', + ttl: 100, + refreshAheadFactor: 0.2 + }); + + const fetchData = sinon.stub().resolves('Fresh'); + await cache.get(KEY, fetchData); + + await new Promise((resolve) => { + setTimeout(resolve, 10); + }); + + assert.ok(metricCall('cache-background-refresh-triggered')); + assert.ok(metricCall('cache-background-refresh-failed')); + assert.equal(metricCall('cache-background-refresh-succeeded'), undefined); + }); + + it('emits metrics correctly on a cloned adapter', async function () { + // Regression: `_metric` is underscore-private (not `#`-private) so + // that api-framework's 
_.cloneDeep doesn't turn metric emissions + // into "Receiver must be an instance of..." errors. + const cacheStub = createCacheStub(); + cacheStub.get.resolves('v'); + const cache = new RedisCache({cache: cacheStub, featureName: 'postsPublic'}); + + const cloned = _.cloneDeep(cache); + await cloned.get('k'); + + assert.ok(metricCall('cache-hit'), 'cache-hit was emitted on clone'); + assert.equal(metricCall('cache-hit').args[1].feature, 'postsPublic'); + }); + }); + describe('survives deep cloning', function () { // Regression: api-framework's pipeline calls _.cloneDeep on the // controller, which deep-clones the cache adapter instance. The clone From 02b03dbc30500a46f52c5123491c42ba2cc308fe Mon Sep 17 00:00:00 2001 From: Jonatan Svennberg Date: Mon, 27 Apr 2026 16:03:01 +0200 Subject: [PATCH 03/10] =?UTF-8?q?=F0=9F=90=9B=20Fixed=20stale=20member=20e?= =?UTF-8?q?mail=20suppression=20state=20on=20resignup=20(#27408)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit closes https://linear.app/ghost/issue/ONC-1640/member-with-email-disabled-repeatedly-being-sent-newsletters-for Always set member.email_disabled to true on 605/607 permanent-failure errors from Mailgun. When a suppressed member was deleted and someone later signed up with the same email, the new member was created with `email_disabled=false` even though the suppression record still existed. The send filter then included them in every newsletter. Mailgun rejected each send with a 607, triggering the suppression handler. But the handler silently swallowed the `ER_DUP_ENTRY` error from trying to re-insert the existing suppression record, never reaching the dispatch that would have set `email_disabled=true`. The cycle repeated indefinitely. 
--- .../mailgun-email-suppression-list.js | 32 +++-- .../services/member-bread-service.js | 5 + .../mailgun-email-suppression-list.test.js | 43 +++++- .../services/members-bread-service.test.js | 122 +++++++++++++++++- 4 files changed, 185 insertions(+), 17 deletions(-) diff --git a/ghost/core/core/server/services/email-suppression-list/mailgun-email-suppression-list.js b/ghost/core/core/server/services/email-suppression-list/mailgun-email-suppression-list.js index 52bd824943b..371bf7ecb12 100644 --- a/ghost/core/core/server/services/email-suppression-list/mailgun-email-suppression-list.js +++ b/ghost/core/core/server/services/email-suppression-list/mailgun-email-suppression-list.js @@ -117,31 +117,35 @@ class MailgunEmailSuppressionList extends AbstractEmailSuppressionList { async init() { this.Suppression = models.Suppression; const handleEvent = reason => async (event) => { - try { - if (reason === 'bounce') { - if (!Number.isInteger(event.error?.code)) { - return; - } - if (event.error.code !== 607 && event.error.code !== 605) { - return; - } + if (reason === 'bounce') { + if (!Number.isInteger(event.error?.code)) { + return; + } + if (event.error.code !== 607 && event.error.code !== 605) { + return; } + } + try { await this.Suppression.add({ email: event.email, email_id: event.emailId, reason: reason, created_at: event.timestamp }); - DomainEvents.dispatch(EmailSuppressedEvent.create({ - emailAddress: event.email, - emailId: event.emailId, - reason: reason - }, event.timestamp)); } catch (err) { - if (err.code !== 'ER_DUP_ENTRY') { + if (err.code !== 'ER_DUP_ENTRY' && err.code !== 'SQLITE_CONSTRAINT') { logging.error(err); + return; } + // Suppression already exists — still dispatch so any drifted + // member state (e.g. email_disabled=false) gets corrected. 
+ logging.info(`Re-dispatching EmailSuppressedEvent for existing suppression (${reason}): ${event.email}`); } + DomainEvents.dispatch(EmailSuppressedEvent.create({ + emailAddress: event.email, + emailId: event.emailId, + reason: reason + }, event.timestamp)); }; DomainEvents.subscribe(EmailBouncedEvent, handleEvent('bounce')); DomainEvents.subscribe(SpamComplaintEvent, handleEvent('spam')); diff --git a/ghost/core/core/server/services/members/members-api/services/member-bread-service.js b/ghost/core/core/server/services/members/members-api/services/member-bread-service.js index a30c3b3dfc5..e66382b6f01 100644 --- a/ghost/core/core/server/services/members/members-api/services/member-bread-service.js +++ b/ghost/core/core/server/services/members/members-api/services/member-bread-service.js @@ -360,6 +360,11 @@ module.exports = class MemberBREADService { let model; try { + if (data.email && data.email_disabled === undefined) { + const isSuppressed = (await this.emailSuppressionList.getSuppressionData(data.email))?.suppressed; + data.email_disabled = !!isSuppressed; + } + const attribution = await this.memberAttributionService.getAttributionFromContext(options?.context); if (attribution) { data.attribution = attribution; diff --git a/ghost/core/test/integration/services/mailgun-email-suppression-list.test.js b/ghost/core/test/integration/services/mailgun-email-suppression-list.test.js index b5f0ffa9119..1700fd174b4 100644 --- a/ghost/core/test/integration/services/mailgun-email-suppression-list.test.js +++ b/ghost/core/test/integration/services/mailgun-email-suppression-list.test.js @@ -5,6 +5,7 @@ const DomainEvents = require('@tryghost/domain-events'); const MailgunClient = require('../../../core/server/services/lib/mailgun-client'); const emailAnalytics = require('../../../core/server/services/email-analytics'); +const models = require('../../../core/server/models'); describe('MailgunEmailSuppressionList', function () { let agent; @@ -12,7 +13,7 @@ 
describe('MailgunEmailSuppressionList', function () { before(async function () { agent = await agentProvider.getAdminAPIAgent(); - await fixtureManager.init('newsletters', 'members:newsletters', 'members:emails'); + await fixtureManager.init('newsletters', 'members:newsletters', 'members:emails:failed'); await agent.loginAsOwner(); sinon.stub(MailgunClient.prototype, 'fetchEvents').callsFake(async function (_, batchHandler) { @@ -171,6 +172,46 @@ describe('MailgunEmailSuppressionList', function () { assert.equal(memberAfter.email_suppression.suppressed, true, 'The member should have a suppressed email'); assert.equal(memberAfter.email_suppression.info.reason, 'spam'); }); + + it('Sets email_disabled on the member when a suppression record already exists (ONC-1640)', async function () { + // Regression test: if a Suppression record already exists for the address + // (e.g. from a deleted previous member), a bounce event for a new member + // with the same address used to silently ER_DUP_ENTRY and never dispatch + // EmailSuppressedEvent, leaving the member with email_disabled=false forever. 
+ const emailBatch = fixtureManager.get('email_batches', 0); + const emailRecipient = fixtureManager.get('email_recipients', 5); + assert(emailRecipient.batch_id === emailBatch.id); + const memberId = emailRecipient.member_id; + const providerId = emailBatch.provider_id; + const recipient = emailRecipient.member_email; + const timestamp = new Date(2000, 0, 1); + + await models.Suppression.add({ + email: recipient, + email_id: emailBatch.email_id, + reason: 'bounce', + created_at: new Date(1999, 0, 1) + }); + + const memberBefore = await models.Member.findOne({id: memberId}, {require: true}); + assert.equal(memberBefore.get('email_disabled'), false, 'Test setup: the member should start with email_disabled=false'); + + events = [createPermanentFailedEvent({ + errorCode: 605, + providerId, + timestamp, + recipient + })]; + + await emailAnalytics.fetchLatestOpenedEvents(); + await DomainEvents.allSettled(); + + const memberAfter = await models.Member.findOne({id: memberId}, {require: true}); + assert.equal(memberAfter.get('email_disabled'), true, 'Member should be disabled even when the Suppression record already existed'); + + // Clean up so subsequent tests using this email aren't affected + await models.Suppression.destroy({destroyBy: {email: recipient}}); + }); }); function createPermanentFailedEvent({errorCode, providerId, timestamp, recipient}) { diff --git a/ghost/core/test/unit/server/services/members/members-api/services/members-bread-service.test.js b/ghost/core/test/unit/server/services/members/members-api/services/members-bread-service.test.js index 01c83bbdf88..ca49cf56ab2 100644 --- a/ghost/core/test/unit/server/services/members/members-api/services/members-bread-service.test.js +++ b/ghost/core/test/unit/server/services/members/members-api/services/members-bread-service.test.js @@ -45,6 +45,7 @@ describe('MemberBreadService', function () { const linkStripeCustomerStub = sinon.stub().resolves(); const createStub = sinon.stub().resolves(mockMemberModel); + 
const getSuppressionDataStub = sinon.stub().resolves({suppressed: false, info: null}); const memberRepository = { create: createStub, @@ -60,7 +61,7 @@ describe('MemberBreadService', function () { labsService: {isSet: sinon.stub().returns(false)}, newslettersService: {browse: sinon.stub().resolves([])}, settingsCache: {get: sinon.stub()}, - emailSuppressionList: {getSuppressionData: sinon.stub().resolves({suppressed: false, info: null})}, + emailSuppressionList: {getSuppressionData: getSuppressionDataStub}, settingsHelpers: {createUnsubscribeUrl: sinon.stub().returns('http://example.com/unsubscribe')} }); @@ -72,7 +73,7 @@ describe('MemberBreadService', function () { status: 'free' }); - return {service, memberRepository, linkStripeCustomerStub, createStub}; + return {service, memberRepository, linkStripeCustomerStub, createStub, getSuppressionDataStub}; } it('passes context to linkStripeCustomer when stripe_customer_id is provided', async function () { @@ -149,6 +150,123 @@ describe('MemberBreadService', function () { assert.ok(linkStripeCustomerOptions.context, 'context should be passed to linkStripeCustomer'); assert.equal(linkStripeCustomerOptions.context.import, true, 'context.import should be true'); }); + + it('sets email_disabled to true when the email is on the suppression list', async function () { + // Prevents ONC-1640: a previous member with this email bounced/complained, + // was deleted, and the suppression record remains. A new signup with that + // same address must inherit the disabled state instead of starting clean. 
+ const {service, createStub, getSuppressionDataStub} = createService(); + getSuppressionDataStub.resolves({suppressed: true, info: {reason: 'spam'}}); + + await service.add({ + email: 'suppressed@example.com', + name: 'New Signup' + }, {}); + + assert.equal(createStub.calledOnce, true); + const createdData = createStub.firstCall.args[0]; + assert.equal(createdData.email_disabled, true); + }); + + it('sets email_disabled to false when the email is not on the suppression list', async function () { + const {service, createStub} = createService(); + + await service.add({ + email: 'clean@example.com', + name: 'Clean Signup' + }, {}); + + assert.equal(createStub.calledOnce, true); + const createdData = createStub.firstCall.args[0]; + assert.equal(createdData.email_disabled, false); + }); + + it('preserves explicit email_disabled when the email is on the suppression list', async function () { + const {service, createStub, getSuppressionDataStub} = createService(); + getSuppressionDataStub.resolves({suppressed: true, info: {reason: 'spam'}}); + + await service.add({ + email: 'suppressed@example.com', + name: 'New Signup', + email_disabled: false + }, {}); + + assert.equal(getSuppressionDataStub.called, false); + assert.equal(createStub.calledOnce, true); + const createdData = createStub.firstCall.args[0]; + assert.equal(createdData.email_disabled, false); + }); + }); + + describe('edit', function () { + function createMockMemberModel() { + return { + id: 'member_123', + get: sinon.stub().returns(false), + related: sinon.stub().returns({find: () => null, toJSON: () => [], models: []}), + toJSON: sinon.stub().returns({ + id: 'member_123', + email: 'test@example.com' + }) + }; + } + + function createService() { + const mockMemberModel = createMockMemberModel(); + const updateStub = sinon.stub().resolves(mockMemberModel); + const getSuppressionDataStub = sinon.stub().resolves({suppressed: false, info: null}); + + const service = new MemberBreadService({ + memberRepository: { 
+ update: updateStub + }, + stripeService: {configured: false}, + memberAttributionService: {getAttributionFromContext: sinon.stub().resolves(null)}, + emailService: {}, + labsService: {isSet: sinon.stub().returns(false)}, + newslettersService: {browse: sinon.stub().resolves([])}, + settingsCache: {get: sinon.stub()}, + emailSuppressionList: {getSuppressionData: getSuppressionDataStub}, + settingsHelpers: {createUnsubscribeUrl: sinon.stub().returns('http://example.com/unsubscribe')} + }); + + sinon.stub(service, 'read').resolves({id: 'member_123'}); + + return {service, updateStub, getSuppressionDataStub}; + } + + it('sets email_disabled to true when the new email is on the suppression list', async function () { + const {service, updateStub, getSuppressionDataStub} = createService(); + getSuppressionDataStub.resolves({suppressed: true, info: {reason: 'spam'}}); + + await service.edit({ + email: 'suppressed@example.com' + }, {id: 'member_123'}); + + const updatedData = updateStub.firstCall.args[0]; + assert.equal(updatedData.email_disabled, true); + }); + + it('sets email_disabled to false when the new email is not on the suppression list', async function () { + const {service, updateStub} = createService(); + + await service.edit({ + email: 'clean@example.com' + }, {id: 'member_123'}); + + const updatedData = updateStub.firstCall.args[0]; + assert.equal(updatedData.email_disabled, false); + }); + + it('does not check the suppression list when email is not being changed', async function () { + const {service, getSuppressionDataStub} = createService(); + + await service.edit({ + name: 'New Name' + }, {id: 'member_123'}); + + assert.equal(getSuppressionDataStub.called, false); + }); }); describe('read', function () { From 67ceb9f6a1712be8e295cb3bb96ec899a60879bc Mon Sep 17 00:00:00 2001 From: Rob Lester Date: Tue, 21 Apr 2026 18:04:53 +0100 Subject: [PATCH 04/10] Refactored admin-auth iframe message handler dispatch Consolidated the per-action branches in the 
admin-auth iframe bridge into a single actions table with one shared dispatcher. Each action declares its URL template, and the listener funnels every call through one path: parse payload, look up action, build URL, fetch, post response back. Adding a new action becomes one table entry rather than a new branch in a switch. Pulled identifier validation (24-char hex ObjectID shape) and querystring construction into small helpers at the top of the module. Identifier shape is checked once at dispatch, so URLs built from message-supplied IDs reject malformed input before reaching fetch. Unknown actions are ignored, malformed JSON no longer breaks dispatch, and the parse-failure diagnostic log now includes the bridge name and payload. No behaviour change for the comments UI, which already sends valid ObjectIDs. A small ESLint override under core/frontend/src/admin-auth/** lints the bridge in a browser environment without Node-only rules (no-console, ghost-custom/no-native-error). Credit: Jorian Woltjer --- ghost/core/.eslintrc.js | 15 ++ .../public/admin-auth/admin-auth.min.js | 2 +- .../src/admin-auth/message-handler.js | 140 ++++++----------- .../src/admin-auth-message-handler.test.js | 144 ++++++++++++++++++ 4 files changed, 209 insertions(+), 92 deletions(-) create mode 100644 ghost/core/test/unit/frontend/src/admin-auth-message-handler.test.js diff --git a/ghost/core/.eslintrc.js b/ghost/core/.eslintrc.js index a07a00f37a0..b23430cd2a7 100644 --- a/ghost/core/.eslintrc.js +++ b/ghost/core/.eslintrc.js @@ -124,6 +124,21 @@ module.exports = { 'ghost/filenames/match-regex': ['error', '^[a-z0-9_.-]+$', false] } }, + { + // Browser-only scripts served by the admin iframe bridge. They run in + // the reader's browser, so `window`/`fetch` etc. are real globals, + // Ghost's Node-only `@tryghost/errors` classes are unavailable, and + // `console` is the only diagnostic channel available for debugging + // bridge configuration/integration issues in devtools. 
+ files: 'core/frontend/src/admin-auth/**/*.js', + env: { + browser: true + }, + rules: { + 'ghost/ghost-custom/no-native-error': 'off', + 'no-console': 'off' + } + }, /** * @TODO: enable these soon */ diff --git a/ghost/core/core/frontend/public/admin-auth/admin-auth.min.js b/ghost/core/core/frontend/public/admin-auth/admin-auth.min.js index 645b57ea65f..60efe274b4c 100644 --- a/ghost/core/core/frontend/public/admin-auth/admin-auth.min.js +++ b/ghost/core/core/frontend/public/admin-auth/admin-auth.min.js @@ -1 +1 @@ -"use strict";const adminUrl=window.location.href.replace("auth-frame/","")+"api/admin",siteOrigin="{{SITE_ORIGIN}}";window.addEventListener("message",async function(a){if(a.origin!==siteOrigin){console.warn("Ignored message to admin auth iframe because of mismatch in origin","expected",siteOrigin,"got",a.origin,"with data",a.data);return}let n=null;try{n=JSON.parse(a.data)}catch(t){console.error(t)}function e(t,s){a.source.postMessage(JSON.stringify({uid:n.uid,error:t,result:s}),siteOrigin)}if(n.action==="browseComments")try{const{postId:t,params:s}=n,o=await(await fetch(adminUrl+`/comments/post/${t}/?${new URLSearchParams(s).toString()}`)).json();e(null,o)}catch(t){e(t,null)}if(n.action==="getReplies")try{const{commentId:t,params:s}=n,o=await(await fetch(adminUrl+`/comments/${t}/replies/?${new URLSearchParams(s).toString()}`)).json();e(null,o)}catch(t){e(t,null)}if(n.action==="readComment")try{const{commentId:t,params:s}=n,o=await(await fetch(adminUrl+"/comments/"+t+"/?"+new URLSearchParams(s).toString())).json();e(null,o)}catch(t){e(t,null)}if(n.action==="getUser")try{const s=await(await fetch(adminUrl+"/users/me/?include=roles")).json();e(null,s)}catch(t){e(t,null)}if(n.action==="hideComment")try{const s=await(await fetch(adminUrl+"/comments/"+n.id+"/",{method:"PUT",body:JSON.stringify({comments:[{id:n.id,status:"hidden"}]}),headers:{"Content-Type":"application/json"}})).json();e(null,s)}catch(t){e(t,null)}if(n.action==="showComment")try{const 
s=await(await fetch(adminUrl+"/comments/"+n.id+"/",{method:"PUT",body:JSON.stringify({comments:[{id:n.id,status:"published"}]}),headers:{"Content-Type":"application/json"}})).json();e(null,s)}catch(t){e(t,null)}}); +"use strict";const adminUrl=window.location.href.replace("auth-frame/","")+"api/admin",siteOrigin="{{SITE_ORIGIN}}";function id(e){if(typeof e!="string"||!/^[a-f0-9]{24}$/i.test(e))throw new Error("Invalid identifier");return e}function qs(e){const t=new URLSearchParams(e).toString();return t?"?"+t:""}function setCommentStatus(e,t){const s=id(e);return fetch(`${adminUrl}/comments/${s}/`,{method:"PUT",body:JSON.stringify({comments:[{id:s,status:t}]}),headers:{"Content-Type":"application/json"}})}const actions={browseComments:e=>fetch(`${adminUrl}/comments/post/${id(e.postId)}/${qs(e.params)}`),getReplies:e=>fetch(`${adminUrl}/comments/${id(e.commentId)}/replies/${qs(e.params)}`),readComment:e=>fetch(`${adminUrl}/comments/${id(e.commentId)}/${qs(e.params)}`),getUser:()=>fetch(`${adminUrl}/users/me/?include=roles`),hideComment:e=>setCommentStatus(e.id,"hidden"),showComment:e=>setCommentStatus(e.id,"published")};window.addEventListener("message",async function(e){if(e.origin!==siteOrigin){console.warn("Ignored message to admin auth iframe because of mismatch in origin","expected",siteOrigin,"got",e.origin,"with data",e.data);return}let t;try{t=JSON.parse(e.data)}catch(n){console.error("Admin auth iframe failed to parse message from site origin:",e.data,n);return}function s(n,o){e.source.postMessage(JSON.stringify({uid:t.uid,error:n?n.message:null,result:o}),siteOrigin)}const i=actions[t.action];if(i)try{const o=await(await i(t)).json();s(null,o)}catch(n){s(n,null)}}); diff --git a/ghost/core/core/frontend/src/admin-auth/message-handler.js b/ghost/core/core/frontend/src/admin-auth/message-handler.js index 7803b6064c0..76d1baeb63e 100644 --- a/ghost/core/core/frontend/src/admin-auth/message-handler.js +++ 
b/ghost/core/core/frontend/src/admin-auth/message-handler.js @@ -3,114 +3,72 @@ const adminUrl = window.location.href.replace('auth-frame/', '') + 'api/admin'; // At compile time, we'll replace the value with the actual origin. const siteOrigin = '{{SITE_ORIGIN}}'; +function id(value) { + if (typeof value !== 'string' || !/^[a-f0-9]{24}$/i.test(value)) { + throw new Error('Invalid identifier'); + } + return value; +} + +function qs(params) { + const s = new URLSearchParams(params).toString(); + return s ? '?' + s : ''; +} + +function setCommentStatus(commentId, status) { + const safeId = id(commentId); + return fetch(`${adminUrl}/comments/${safeId}/`, { + method: 'PUT', + body: JSON.stringify({ + comments: [{id: safeId, status}] + }), + headers: { + 'Content-Type': 'application/json' + } + }); +} + +const actions = { + browseComments: d => fetch(`${adminUrl}/comments/post/${id(d.postId)}/${qs(d.params)}`), + getReplies: d => fetch(`${adminUrl}/comments/${id(d.commentId)}/replies/${qs(d.params)}`), + readComment: d => fetch(`${adminUrl}/comments/${id(d.commentId)}/${qs(d.params)}`), + getUser: () => fetch(`${adminUrl}/users/me/?include=roles`), + hideComment: d => setCommentStatus(d.id, 'hidden'), + showComment: d => setCommentStatus(d.id, 'published') +}; + window.addEventListener('message', async function (event) { if (event.origin !== siteOrigin) { console.warn('Ignored message to admin auth iframe because of mismatch in origin', 'expected', siteOrigin, 'got', event.origin, 'with data', event.data); return; } - let data = null; + + let data; try { data = JSON.parse(event.data); } catch (err) { - console.error(err); + console.error('Admin auth iframe failed to parse message from site origin:', event.data, err); + return; } function respond(error, result) { event.source.postMessage(JSON.stringify({ uid: data.uid, - error: error, - result: result + error: error ? 
error.message : null, + result }), siteOrigin); } - if (data.action === 'browseComments') { - try { - const {postId, params} = data; - const res = await fetch( - adminUrl + `/comments/post/${postId}/?${new URLSearchParams(params).toString()}` - ); - const json = await res.json(); - respond(null, json); - } catch (err) { - respond(err, null); - } - } - - if (data.action === 'getReplies') { - try { - const {commentId, params} = data; - const res = await fetch( - adminUrl + `/comments/${commentId}/replies/?${new URLSearchParams(params).toString()}` - ); - const json = await res.json(); - respond(null, json); - } catch (err) { - respond(err, null); - } - } - - if (data.action === 'readComment') { - try { - const {commentId, params} = data; - const res = await fetch(adminUrl + '/comments/' + commentId + '/' + '?' + new URLSearchParams(params).toString()); - const json = await res.json(); - respond(null, json); - } catch (err) { - respond(err, null); - } - } - - if (data.action === 'getUser') { - try { - const res = await fetch( - adminUrl + '/users/me/?include=roles' - ); - const json = await res.json(); - respond(null, json); - } catch (err) { - respond(err, null); - } - } - - if (data.action === 'hideComment') { - try { - const res = await fetch(adminUrl + '/comments/' + data.id + '/', { - method: 'PUT', - body: JSON.stringify({ - comments: [{ - id: data.id, - status: 'hidden' - }] - }), - headers: { - 'Content-Type': 'application/json' - } - }); - const json = await res.json(); - respond(null, json); - } catch (err) { - respond(err, null); - } + const handler = actions[data.action]; + if (!handler) { + return; } - if (data.action === 'showComment') { - try { - const res = await fetch(adminUrl + '/comments/' + data.id + '/', { - method: 'PUT', - body: JSON.stringify({ - comments: [{ - id: data.id, - status: 'published' - }] - }), - headers: { - 'Content-Type': 'application/json' - } - }); - const json = await res.json(); - respond(null, json); - } catch (err) { - 
respond(err, null); - } + try { + const res = await handler(data); + const json = await res.json(); + respond(null, json); + } catch (err) { + respond(err, null); } }); diff --git a/ghost/core/test/unit/frontend/src/admin-auth-message-handler.test.js b/ghost/core/test/unit/frontend/src/admin-auth-message-handler.test.js new file mode 100644 index 00000000000..f1ac77bf167 --- /dev/null +++ b/ghost/core/test/unit/frontend/src/admin-auth-message-handler.test.js @@ -0,0 +1,144 @@ +const assert = require('node:assert/strict'); +const fs = require('fs'); +const path = require('path'); +const vm = require('vm'); + +const SOURCE_PATH = path.join( + __dirname, + '../../../../core/frontend/src/admin-auth/message-handler.js' +); + +const SITE_ORIGIN = 'https://site.example.com'; +const ADMIN_HREF = 'https://admin.example.com/ghost/auth-frame/'; +const VALID_ID = 'a'.repeat(24); + +function loadHandler(fetchStub) { + const source = fs + .readFileSync(SOURCE_PATH, 'utf8') + .replace('{{SITE_ORIGIN}}', SITE_ORIGIN); + + let captured = null; + const context = { + window: { + location: {href: ADMIN_HREF}, + addEventListener(_event, fn) { + captured = fn; + } + }, + fetch: fetchStub, + URLSearchParams + }; + + vm.createContext(context); + vm.runInContext(source, context); + + assert.ok(captured, 'message listener was not registered'); + return captured; +} + +function buildEvent(data) { + const responses = []; + const event = { + origin: SITE_ORIGIN, + data: JSON.stringify(data), + source: { + postMessage(payload) { + responses.push(JSON.parse(payload)); + } + } + }; + return {event, responses}; +} + +async function dispatch(data) { + const calls = []; + const fetchStub = async (url, opts) => { + calls.push({url, opts}); + return {json: async () => ({ok: true})}; + }; + const listener = loadHandler(fetchStub); + const {event, responses} = buildEvent(data); + await listener(event); + return {calls, responses}; +} + +const INVALID_IDS = [ + {label: 'relative-path segment', value: 
'../../users/me/token'}, + {label: 'non-hex characters', value: 'ghijghijghijghijghijghij'}, + {label: 'wrong length', value: 'a'.repeat(23)}, + {label: 'non-string', value: undefined} +]; + +const ACTION_CASES = [ + {action: 'browseComments', idField: 'postId', expectedPath: `/comments/post/${VALID_ID}/`}, + {action: 'getReplies', idField: 'commentId', expectedPath: `/comments/${VALID_ID}/replies/`}, + {action: 'readComment', idField: 'commentId', expectedPath: `/comments/${VALID_ID}/`}, + {action: 'hideComment', idField: 'id', expectedPath: `/comments/${VALID_ID}/`, expectedMethod: 'PUT'}, + {action: 'showComment', idField: 'id', expectedPath: `/comments/${VALID_ID}/`, expectedMethod: 'PUT'} +]; + +describe('admin-auth message-handler', function () { + describe('origin check', function () { + it('ignores messages from unexpected origins', async function () { + const listener = loadHandler(async () => assert.fail('fetch should not be called')); + await listener({ + origin: 'https://attacker.example.com', + data: JSON.stringify({action: 'browseComments', postId: VALID_ID}), + source: { + postMessage() { + assert.fail('postMessage should not be called'); + } + } + }); + }); + }); + + for (const testCase of ACTION_CASES) { + describe(`action ${testCase.action}`, function () { + it('dispatches a request when the identifier is a valid ObjectID', async function () { + const {calls, responses} = await dispatch({ + uid: 'ok', + action: testCase.action, + [testCase.idField]: VALID_ID + }); + + assert.equal(calls.length, 1, 'fetch should be called once'); + assert.ok( + calls[0].url.includes(testCase.expectedPath), + `expected URL to contain ${testCase.expectedPath}, got ${calls[0].url}` + ); + if (testCase.expectedMethod) { + assert.equal(calls[0].opts.method, testCase.expectedMethod); + } + assert.equal(responses.length, 1); + assert.equal(responses[0].uid, 'ok'); + assert.equal(responses[0].error, null); + }); + + for (const bad of INVALID_IDS) { + it(`rejects 
${bad.label} without calling fetch`, async function () { + const {calls, responses} = await dispatch({ + uid: 'bad', + action: testCase.action, + [testCase.idField]: bad.value + }); + + assert.equal(calls.length, 0, 'fetch must not be called'); + assert.equal(responses.length, 1); + assert.equal(responses[0].uid, 'bad'); + assert.equal(responses[0].error, 'Invalid identifier'); + assert.equal(responses[0].result, null); + }); + } + }); + } + + describe('getUser', function () { + it('still works (no identifier to validate)', async function () { + const {calls, responses} = await dispatch({uid: 'u', action: 'getUser'}); + assert.equal(calls.length, 1); + assert.ok(calls[0].url.includes('/users/me/')); + assert.equal(responses[0].error, null); + }); + }); +}); From 41ff0ce6ed77bb4f67d4f817d858e0fb44caffd3 Mon Sep 17 00:00:00 2001 From: Rob Lester Date: Wed, 22 Apr 2026 10:24:13 +0100 Subject: [PATCH 05/10] =?UTF-8?q?=F0=9F=90=9B=20Added=20bookmark=20card=20?= =?UTF-8?q?fallback=20for=20non-allowlisted=20oEmbed=20responses?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fetchOembedData returns provider-supplied embed html for rich and video oEmbed response types. The fallback path — used when the source isn't on the known-provider allowlist — now drops that html and treats the response as a bookmark candidate, so the caller falls back to a bookmark card. Allowlisted providers (YouTube, Vimeo, Twitter, Spotify, SoundCloud, etc.) continue to use @extractus/oembed-extractor's allowlist via knownProvider, unchanged. photo-type responses are unchanged. 
Credit: Jorian Woltjer --- .../server/services/oembed/oembed-service.js | 12 ++++ ghost/core/test/e2e-api/admin/oembed.test.js | 43 ++++++++++-- .../services/oembed/oembed-service.test.js | 66 ++++++++++++++----- 3 files changed, 101 insertions(+), 20 deletions(-) diff --git a/ghost/core/core/server/services/oembed/oembed-service.js b/ghost/core/core/server/services/oembed/oembed-service.js index 462ff10e84a..8c622a4119a 100644 --- a/ghost/core/core/server/services/oembed/oembed-service.js +++ b/ghost/core/core/server/services/oembed/oembed-service.js @@ -444,6 +444,18 @@ class OEmbedService { return; } + // `rich` and `video` responses ship provider-supplied HTML that gets + // stored in the post's Lexical payload and rendered into the admin + // editor preview (via srcdoc) and into public themes (via innerHTML). + // Known providers (YouTube, Twitter, etc.) go through `knownProvider` + // with @extractus/oembed-extractor's allowlist — anything reaching + // here is an arbitrary site's self-declared oEmbed endpoint, which we + // must not trust. Drop the response and let the caller fall back to + // a bookmark card. + if (oembed.type === 'video' || oembed.type === 'rich') { + return; + } + // return the extracted object, don't pass through the response body return oembed; } diff --git a/ghost/core/test/e2e-api/admin/oembed.test.js b/ghost/core/test/e2e-api/admin/oembed.test.js index 5e0c855c627..d5544c54b34 100644 --- a/ghost/core/test/e2e-api/admin/oembed.test.js +++ b/ghost/core/test/e2e-api/admin/oembed.test.js @@ -536,6 +536,10 @@ describe('Oembed API', function () { }); it('strips unknown response fields', async function () { + // Uses `photo` because unknown providers returning rich/video are + // rejected outright as a security measure (ONC-1648); see the next + // test. `photo` still passes through so we can verify the legacy + // field-stripping behaviour. 
const pageMock = nock('http://test.com') .get('/') .reply(200, ''); @@ -544,8 +548,8 @@ describe('Oembed API', function () { .get('/oembed') .reply(200, { version: '1.0', - type: 'video', - html: '

Test

', + type: 'photo', + url: 'http://test.com/photo.jpg', width: 200, height: 100, unknown: 'test' @@ -563,14 +567,45 @@ describe('Oembed API', function () { assert.deepEqual(res.body, { version: '1.0', - type: 'video', - html: '

Test

', + type: 'photo', + url: 'http://test.com/photo.jpg', width: 200, height: 100 }); assert.equal(res.body.unknown, undefined); }); + it('rejects rich/video responses from non-allowlisted providers (ONC-1648)', async function () { + // Self-declared oEmbed endpoints cannot return safe HTML for + // embedding in the admin preview or on the public site. Known + // providers (YouTube, Twitter, …) go through `knownProvider` with + // @extractus's allowlist; anything reaching this fallback path is + // an arbitrary site and must not propagate its html. + const pageMock = nock('http://test.com') + .get('/') + .reply(200, ''); + + const oembedMock = nock('http://test.com') + .get('/oembed') + .reply(200, { + version: '1.0', + type: 'video', + html: '', + width: 200, + height: 100 + }); + + const url = encodeURIComponent('http://test.com'); + await request.get(localUtils.API.getApiQuery(`oembed/?url=${url}`)) + .set('Origin', config.get('url')) + .expect('Content-Type', /json/) + .expect('Cache-Control', testUtils.cacheRules.private) + .expect(422); + + assert.equal(pageMock.isDone(), true); + assert.equal(oembedMock.isDone(), true); + }); + it('skips fetching IPv4 addresses', async function () { dnsPromises.lookup.restore(); sinon.stub(dnsPromises, 'lookup').callsFake(function (hostname) { diff --git a/ghost/core/test/unit/server/services/oembed/oembed-service.test.js b/ghost/core/test/unit/server/services/oembed/oembed-service.test.js index a1578a4df48..6e704a6be62 100644 --- a/ghost/core/test/unit/server/services/oembed/oembed-service.test.js +++ b/ghost/core/test/unit/server/services/oembed/oembed-service.test.js @@ -104,35 +104,69 @@ describe('oembed-service', function () { }); }); - describe('fetchOembedDataFromUrl', function () { - it('allows rich embeds to skip height field', async function () { + describe('fetchOembedData', function () { + const pageHtml = ``; + + it('drops rich-type responses from non-allowlisted providers (ONC-1648)', async function () { + // 
Self-declared oEmbed endpoints for arbitrary sites cannot be trusted + // to return safe HTML — known providers (Twitter, YouTube, …) go through + // `knownProvider` which uses an allowlist. Rich/video responses reaching + // this fallback path must not propagate their html into post content. nock('https://www.example.com') - .get('/') - .query(true) - .reply(200, ``); + .get('/oembed') + .reply(200, { + type: 'rich', + version: '1.0', + title: 'Test Title', + html: '', + width: 640, + height: 480 + }); + + const response = await oembedService.fetchOembedData('https://www.example.com', pageHtml); + // Returning undefined signals the caller to fall back to a bookmark + // card instead of storing the attacker-controlled html. + assert.equal(response, undefined); + }); + + it('drops video-type responses from non-allowlisted providers (ONC-1648)', async function () { nock('https://www.example.com') .get('/oembed') - .query(true) .reply(200, { - type: 'rich', + type: 'video', version: '1.0', title: 'Test Title', - author_name: 'Test Author', - author_url: 'https://example.com/user/testauthor', - html: '', + html: '', width: 640, - height: null + height: 480 }); - const response = await oembedService.fetchOembedDataFromUrl('https://www.example.com'); + const response = await oembedService.fetchOembedData('https://www.example.com', pageHtml); - assert.equal(response.title, 'Test Title'); - assert.equal(response.author_name, 'Test Author'); - assert.equal(response.author_url, 'https://example.com/user/testauthor'); - assert.equal(response.html, ''); + assert.equal(response, undefined); }); + it('still returns photo-type responses from non-allowlisted providers', async function () { + nock('https://www.example.com') + .get('/oembed') + .reply(200, { + type: 'photo', + version: '1.0', + title: 'Test Title', + url: 'https://www.example.com/photo.jpg', + width: 640, + height: 480 + }); + + const response = await oembedService.fetchOembedData('https://www.example.com', 
pageHtml); + + assert.equal(response.type, 'photo'); + assert.equal(response.url, 'https://www.example.com/photo.jpg'); + }); + }); + + describe('fetchOembedDataFromUrl', function () { it('uses a known user-agent for bookmark requests', async function () { nock('https://www.example.com') .get('/') From f98cf84fe63dee84738022fa1f44a724d65dfb7a Mon Sep 17 00:00:00 2001 From: Rob Lester Date: Wed, 22 Apr 2026 14:15:23 +0100 Subject: [PATCH 06/10] =?UTF-8?q?=F0=9F=90=9B=20Fixed=20admin=20session=20?= =?UTF-8?q?not=20rotating=20on=20password=20change=20and=20reset?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Password change and password reset destroyed every other active session for the user, but the originating session was preserved unchanged — including its session_id. The originating browser stayed logged in (intended), but the cookie value didn't refresh. Both endpoints now call req.session.regenerate() and re-assign the verified user to the new session, so the originating browser keeps its login with a fresh session_id. Extracted as rotateAndAssignVerifiedUserToSession to share the logic across the two endpoints. Session.destroy is now idempotent: a missing row is treated as success. Needed because req.session.regenerate() calls store.destroy() on a row the model-layer destroy loop has already removed. 
--- .../server/api/endpoints/authentication.js | 9 +- ghost/core/core/server/api/endpoints/users.js | 31 ++- ghost/core/core/server/models/session.js | 9 +- ghost/core/core/server/models/user.js | 9 +- .../services/auth/session/session-service.js | 29 ++ .../admin/__snapshots__/users.test.js.snap | 3 + .../admin/session-invalidation.test.js | 249 ++++++++++++++++++ ghost/core/test/e2e-api/admin/users.test.js | 7 +- 8 files changed, 329 insertions(+), 17 deletions(-) create mode 100644 ghost/core/test/e2e-api/admin/session-invalidation.test.js diff --git a/ghost/core/core/server/api/endpoints/authentication.js b/ghost/core/core/server/api/endpoints/authentication.js index 0c44a67b702..d1b2887312b 100644 --- a/ghost/core/core/server/api/endpoints/authentication.js +++ b/ghost/core/core/server/api/endpoints/authentication.js @@ -179,11 +179,12 @@ const controller = { }); } - const origin = auth.session.getOriginOfRequest(frame.original); - await auth.session.sessionService.assignVerifiedUserToSession({ - session: frame.original.session, + // Rotate the session_id and mint a fresh verified session so that + // any stolen or cloned copy of the pre-reset cookie is rejected + // on its next request. + await auth.session.sessionService.rotateAndAssignVerifiedUserToSession({ + req: frame.original.session.req, user: doResetParams.user, - origin, ip: frame.options.ip }); diff --git a/ghost/core/core/server/api/endpoints/users.js b/ghost/core/core/server/api/endpoints/users.js index 2018c185252..540a4a6edcc 100644 --- a/ghost/core/core/server/api/endpoints/users.js +++ b/ghost/core/core/server/api/endpoints/users.js @@ -30,6 +30,27 @@ function getTargetId(frame) { return frame.options.id === 'me' ? frame.user.id : frame.options.id; } +// When a user changes their own password we destroy all of their sessions in +// the model, then rotate the session_id here and mint a fresh verified session +// for the current browser. 
Rotating invalidates any cloned or stolen copy of +// the pre-change cookie. +async function rotateSessionForSelfPasswordChange(frame, user) { + const targetUserId = frame.data.password[0].user_id; + const currentUserId = frame.options.context && frame.options.context.user; + if (targetUserId !== currentUserId) { + return; + } + const req = frame.original.session && frame.original.session.req; + if (!req) { + return; + } + await auth.session.sessionService.rotateAndAssignVerifiedUserToSession({ + req, + user, + ip: frame.options.ip + }); +} + async function fetchOrCreatePersonalToken(userId) { const token = await models.ApiKey.findOne({user_id: userId}, {}); @@ -217,6 +238,9 @@ const controller = { headers: { cacheInvalidate: false }, + options: [ + 'ip' + ], validation: { docName: 'password', data: { @@ -232,9 +256,10 @@ const controller = { return frame.data.password[0].user_id; } }, - query(frame) { - frame.options.skipSessionID = frame.original.session.id; - return models.User.changePassword(frame.data.password[0], frame.options); + async query(frame) { + const result = await models.User.changePassword(frame.data.password[0], frame.options); + await rotateSessionForSelfPasswordChange(frame, result); + return result; } }, diff --git a/ghost/core/core/server/models/session.js b/ghost/core/core/server/models/session.js index 367452bc02b..848f4e2a761 100644 --- a/ghost/core/core/server/models/session.js +++ b/ghost/core/core/server/models/session.js @@ -40,12 +40,13 @@ const Session = ghostBookshelf.Model.extend({ } const options = this.filterOptions(unfilteredOptions, 'destroy'); - // Fetch the object before destroying it, so that the changed data is available to events + // Fetch the object before destroying it, so that the changed data is available to events. 
+ // A missing row is treated as success: password-change flows destroy + // every session for the user, then call req.session.regenerate(), which + // asks the store to destroy the same session_id a second time. return this.forge({session_id: options.session_id}) .fetch(options) - .then((obj) => { - return obj.destroy(options); - }); + .then(obj => obj?.destroy(options)); }, destroyAll() { diff --git a/ghost/core/core/server/models/user.js b/ghost/core/core/server/models/user.js index 0014b7efa89..c496bd5b52a 100644 --- a/ghost/core/core/server/models/user.js +++ b/ghost/core/core/server/models/user.js @@ -1056,7 +1056,6 @@ User = ghostBookshelf.Model.extend({ const userId = object.user_id; const oldPassword = object.oldPassword; const isLoggedInUser = userId === options.context.user; - const skipSessionID = unfilteredOptions.skipSessionID; options.require = true; options.withRelated = ['sessions']; @@ -1072,11 +1071,13 @@ User = ghostBookshelf.Model.extend({ const updatedUser = await user.save({password: newPassword}); + // Destroy every active session for this user. The caller must mint a + // fresh session (with a new session_id) for self password-changes so + // that stolen or cloned cookies are invalidated alongside distinct + // concurrent sessions. 
const sessions = user.related('sessions'); for (const session of sessions) { - if (session.get('session_id') !== skipSessionID) { - await session.destroy(options); - } + await session.destroy(options); } return updatedUser; diff --git a/ghost/core/core/server/services/auth/session/session-service.js b/ghost/core/core/server/services/auth/session/session-service.js index 8b050c02381..f6c4a400d5c 100644 --- a/ghost/core/core/server/services/auth/session/session-service.js +++ b/ghost/core/core/server/services/auth/session/session-service.js @@ -258,6 +258,34 @@ module.exports = function createSessionService({ invalidateAuthCodeChallenge(session); } + /** + * rotateAndAssignVerifiedUserToSession + * Regenerates the Express session (issuing a new session_id) and then + * assigns the verified user to the new session. Used after a password + * change or reset so that any cloned or stolen copy of the pre-change + * cookie is rejected on its next request. + * + * @param {{req: Req, user: User, ip?: string}} options + * @returns {Promise} + */ + async function rotateAndAssignVerifiedUserToSession({req, user, ip}) { + await new Promise((resolve, reject) => { + req.session.regenerate((err) => { + if (err) { + reject(err); + return; + } + resolve(); + }); + }); + await assignVerifiedUserToSession({ + session: req.session, + user, + origin: getOriginOfRequest(req), + ip + }); + } + /** * generateAuthCodeForUser * @@ -494,6 +522,7 @@ module.exports = function createSessionService({ createSessionForUser, createVerifiedSessionForUser, assignVerifiedUserToSession, + rotateAndAssignVerifiedUserToSession, removeUserForSession, verifySession, isVerifiedSession, diff --git a/ghost/core/test/e2e-api/admin/__snapshots__/users.test.js.snap b/ghost/core/test/e2e-api/admin/__snapshots__/users.test.js.snap index 7d9d20a9d78..ec79fba330b 100644 --- a/ghost/core/test/e2e-api/admin/__snapshots__/users.test.js.snap +++ b/ghost/core/test/e2e-api/admin/__snapshots__/users.test.js.snap @@ -18,6 
+18,9 @@ Object { "content-type": "application/json; charset=utf-8", "content-version": StringMatching /v\\\\d\\+\\\\\\.\\\\d\\+/, "etag": StringMatching /\\(\\?:W\\\\/\\)\\?"\\(\\?:\\[ !#-\\\\x7E\\\\x80-\\\\xFF\\]\\*\\|\\\\r\\\\n\\[\\\\t \\]\\|\\\\\\\\\\.\\)\\*"/, + "set-cookie": Array [ + StringMatching /\\^ghost-admin-api-session=/, + ], "vary": "Accept-Version, Origin, Accept-Encoding", "x-powered-by": "Express", } diff --git a/ghost/core/test/e2e-api/admin/session-invalidation.test.js b/ghost/core/test/e2e-api/admin/session-invalidation.test.js new file mode 100644 index 00000000000..63c50d4e149 --- /dev/null +++ b/ghost/core/test/e2e-api/admin/session-invalidation.test.js @@ -0,0 +1,249 @@ +const {agentProvider, fixtureManager, resetRateLimits} = require('../../utils/e2e-framework'); +const {mockMail, restore} = require('../../utils/e2e-framework-mock-manager'); +const models = require('../../../core/server/models'); +const security = require('@tryghost/security'); +const settingsCache = require('../../../core/shared/settings-cache'); +const moment = require('moment'); + +const DEFAULT_PASSWORD = 'Sl1m3rson99'; + +async function stealCookiesAs(agent, role) { + return agent.loginAs(null, null, role); +} + +function injectStolenCookies(agent, setCookieHeaders) { + agent.jar.setCookies(setCookieHeaders); +} + +describe('Session invalidation on password change', function () { + let agentA; + let agentB; + let ownerAgent; + let ownerId; + let adminId; + + before(async function () { + agentA = await agentProvider.getAdminAPIAgent(); + agentB = await agentProvider.getAdminAPIAgent(); + ownerAgent = await agentProvider.getAdminAPIAgent(); + await fixtureManager.init('users'); + ownerId = fixtureManager.get('users', 0).id; + adminId = fixtureManager.get('users', 1).id; + }); + + beforeEach(async function () { + await resetRateLimits(); + agentA.resetAuthentication(); + agentB.resetAuthentication(); + ownerAgent.resetAuthentication(); + + await models.User.edit( + 
{password: DEFAULT_PASSWORD}, + {id: ownerId, context: {internal: true}} + ); + await models.User.edit( + {password: DEFAULT_PASSWORD}, + {id: adminId, context: {internal: true}} + ); + }); + + afterEach(function () { + restore(); + }); + + it('Changing password invalidates other concurrent sessions for the same user', async function () { + await agentA.loginAsOwner(); + await agentB.loginAsOwner(); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(200); + + const newPassword = 'Sl1m3rson99-rotated!'; + + await agentA.put('users/password/') + .body({ + password: [{ + newPassword, + ne2Password: newPassword, + oldPassword: DEFAULT_PASSWORD, + user_id: ownerId + }] + }) + .expectStatus(200); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(403); + }); + + it('Stolen-cookie scenario: a cloned session cookie is invalidated by password change', async function () { + const victim = agentA; + const attacker = agentB; + + const stolenCookies = await stealCookiesAs(victim, 'owner'); + injectStolenCookies(attacker, stolenCookies); + + await victim.get('users/me/').expectStatus(200); + await attacker.get('users/me/').expectStatus(200); + + const newPassword = 'Sl1m3rson99-after-theft!'; + + await victim.put('users/password/') + .body({ + password: [{ + newPassword, + ne2Password: newPassword, + oldPassword: DEFAULT_PASSWORD, + user_id: ownerId + }] + }) + .expectStatus(200); + + await victim.get('users/me/').expectStatus(200); + await attacker.get('users/me/').expectStatus(403); + }); + + it('Owner changing another user\'s password invalidates all of that user\'s sessions', async function () { + await ownerAgent.loginAsOwner(); + await agentA.loginAsAdmin(); + await agentB.loginAsAdmin(); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(200); + + const newPassword = 'Sl1m3rson99-by-owner!'; + + await 
ownerAgent.put('users/password/') + .body({ + password: [{ + newPassword, + ne2Password: newPassword, + user_id: adminId + }] + }) + .expectStatus(200); + + await ownerAgent.get('users/me/').expectStatus(200); + await agentA.get('users/me/').expectStatus(403); + await agentB.get('users/me/').expectStatus(403); + }); + + it('Password reset invalidates all prior sessions including stolen cookies', async function () { + const victimCookies = await stealCookiesAs(agentA, 'owner'); + await agentB.loginAsOwner(); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(200); + + mockMail(); + + const ownerFixture = fixtureManager.get('users', 0); + const dbHash = settingsCache.get('db_hash'); + const user = await models.User.getByEmail(ownerFixture.email, {context: {internal: true}}); + + const resetToken = security.tokens.resetToken.generateHash({ + expires: moment().add(1, 'days').valueOf(), + email: ownerFixture.email, + dbHash, + password: user.get('password') + }); + const encodedToken = security.url.encodeBase64(resetToken); + const newPassword = 'Sl1m3rson99-reset!'; + + agentA.clearCookies(); + + await agentA.put('authentication/password_reset') + .body({ + password_reset: [{ + token: encodedToken, + newPassword, + ne2Password: newPassword + }] + }) + .expectStatus(200); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(403); + + ownerAgent.resetAuthentication(); + injectStolenCookies(ownerAgent, victimCookies); + await ownerAgent.get('users/me/').expectStatus(403); + }); + + it('PUT /users/:id strips password from the request body and does not affect sessions', async function () { + await agentA.loginAsAdmin(); + + await agentA.get('users/me/').expectStatus(200); + + await agentA.put(`users/${adminId}/`) + .body({ + users: [{ + id: adminId, + password: 'attempt-to-change-via-edit' + }] + }) + .expectStatus(200); + + await agentA.get('users/me/').expectStatus(200); + + await 
agentB.resetAuthentication(); + await agentB.loginAsAdmin(); + await agentB.get('users/me/').expectStatus(200); + }); + + describe('Failed attempts leave sessions untouched', function () { + it('Wrong old password: sessions are not destroyed', async function () { + await agentA.loginAsOwner(); + await agentB.loginAsOwner(); + + await agentA.put('users/password/') + .body({ + password: [{ + newPassword: 'Sl1m3rson99-wrong-old!', + ne2Password: 'Sl1m3rson99-wrong-old!', + oldPassword: 'not-the-right-password', + user_id: ownerId + }] + }) + .expectStatus(422); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(200); + }); + + it('Unauthorized caller (author changing owner): sessions are not destroyed', async function () { + await ownerAgent.loginAsOwner(); + await agentA.loginAs(null, null, 'author'); + + await agentA.put('users/password/') + .body({ + password: [{ + newPassword: 'Sl1m3rson99-nope!', + ne2Password: 'Sl1m3rson99-nope!', + user_id: ownerId + }] + }) + .expectStatus(403); + + await ownerAgent.get('users/me/').expectStatus(200); + await agentA.get('users/me/').expectStatus(200); + }); + + it('Invalid password reset token: sessions are not destroyed', async function () { + await agentA.loginAsOwner(); + await agentB.loginAsOwner(); + + await ownerAgent.put('authentication/password_reset') + .body({ + password_reset: [{ + token: 'this-is-not-a-valid-token', + newPassword: 'Sl1m3rson99-nope!', + ne2Password: 'Sl1m3rson99-nope!' 
+ }] + }) + .expectStatus(401); + + await agentA.get('users/me/').expectStatus(200); + await agentB.get('users/me/').expectStatus(200); + }); + }); +}); diff --git a/ghost/core/test/e2e-api/admin/users.test.js b/ghost/core/test/e2e-api/admin/users.test.js index 82f0d8c3de9..35513974868 100644 --- a/ghost/core/test/e2e-api/admin/users.test.js +++ b/ghost/core/test/e2e-api/admin/users.test.js @@ -3,7 +3,7 @@ const config = require('../../../core/shared/config'); const models = require('../../../core/server/models'); const db = require('../../../core/server/data/db'); const {agentProvider, fixtureManager, matchers, assertions} = require('../../utils/e2e-framework'); -const {anyContentVersion, anyEtag, anyObject, anyObjectId, anyArray, anyISODateTime, anyString, nullable} = matchers; +const {anyContentVersion, anyEtag, anyObject, anyObjectId, anyArray, anyISODateTime, anyString, nullable, stringMatching} = matchers; const {cacheInvalidateHeaderNotSet, cacheInvalidateHeaderSetToWildcard} = assertions; const localUtils = require('./utils'); @@ -549,7 +549,10 @@ describe('User API', function () { .expectStatus(200) .matchHeaderSnapshot({ 'content-version': anyContentVersion, - etag: anyEtag + etag: anyEtag, + 'set-cookie': [ + stringMatching(/^ghost-admin-api-session=/) + ] }) .matchBodySnapshot() .expect(({body}) => { From 10da09186253d7be24698d3a5512c7f9cbea13a4 Mon Sep 17 00:00:00 2001 From: Ghost CI <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Apr 2026 14:45:57 +0000 Subject: [PATCH 07/10] v6.34.0 --- ghost/admin/package.json | 2 +- ghost/core/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ghost/admin/package.json b/ghost/admin/package.json index 0660d92f871..e269269155d 100644 --- a/ghost/admin/package.json +++ b/ghost/admin/package.json @@ -1,6 +1,6 @@ { "name": "ghost-admin", - "version": "6.34.0-rc.0", + "version": "6.34.0", "description": "Ember.js admin client for Ghost", "author": "Ghost Foundation", 
"homepage": "http://ghost.org", diff --git a/ghost/core/package.json b/ghost/core/package.json index 7001de1b827..0f2702f0806 100644 --- a/ghost/core/package.json +++ b/ghost/core/package.json @@ -1,6 +1,6 @@ { "name": "ghost", - "version": "6.34.0-rc.0", + "version": "6.34.0", "description": "The professional publishing platform", "author": "Ghost Foundation", "homepage": "https://ghost.org", From 1c6617fc846548d2522dc1594291b0e4e29dbb20 Mon Sep 17 00:00:00 2001 From: Ghost CI <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Apr 2026 14:46:00 +0000 Subject: [PATCH 08/10] Bumped version to 6.35.0-rc.0 --- ghost/admin/package.json | 2 +- ghost/core/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ghost/admin/package.json b/ghost/admin/package.json index e269269155d..de9d7ba183a 100644 --- a/ghost/admin/package.json +++ b/ghost/admin/package.json @@ -1,6 +1,6 @@ { "name": "ghost-admin", - "version": "6.34.0", + "version": "6.35.0-rc.0", "description": "Ember.js admin client for Ghost", "author": "Ghost Foundation", "homepage": "http://ghost.org", diff --git a/ghost/core/package.json b/ghost/core/package.json index 0f2702f0806..f27b7770e40 100644 --- a/ghost/core/package.json +++ b/ghost/core/package.json @@ -1,6 +1,6 @@ { "name": "ghost", - "version": "6.34.0", + "version": "6.35.0-rc.0", "description": "The professional publishing platform", "author": "Ghost Foundation", "homepage": "https://ghost.org", From f86f368ea063c4394be146b5c7d8b5ad8b1b0d94 Mon Sep 17 00:00:00 2001 From: Evan Hahn Date: Mon, 27 Apr 2026 11:00:08 -0500 Subject: [PATCH 09/10] Improved types for `BootLogger` class (#27567) no ref This is a types-only change. 
--- ghost/core/core/boot.js | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/ghost/core/core/boot.js b/ghost/core/core/boot.js index 9f653164cf6..c5d55c6b12b 100644 --- a/ghost/core/core/boot.js +++ b/ghost/core/core/boot.js @@ -16,11 +16,20 @@ const debug = require('@tryghost/debug')('boot'); * Helper class to create consistent log messages */ class BootLogger { + /** + * @param {{info: (message: string) => unknown}} logging + * @param {{metric: (name: string, time: number) => unknown}} metrics + * @param {number} startTime + */ constructor(logging, metrics, startTime) { this.logging = logging; this.metrics = metrics; this.startTime = startTime; } + /** + * @param {string} message + * @returns {void} + */ log(message) { let {logging, startTime} = this; logging.info(`Ghost ${message} in ${(Date.now() - startTime) / 1000}s`); @@ -28,6 +37,7 @@ class BootLogger { /** * @param {string} name * @param {number} [initialTime] + * @returns {void} */ metric(name, initialTime) { let {metrics, startTime} = this; @@ -161,7 +171,7 @@ async function initCore({ghostServer, config, frontend}) { /** * These are services required by Ghost's frontend. * @param {object} options - * @param {object} options.bootLogger + * @param {BootLogger} options.bootLogger */ async function initServicesForFrontend({bootLogger}) { From b11949015d0f7292aa92ae328378ee6c187476da Mon Sep 17 00:00:00 2001 From: Sag Date: Mon, 27 Apr 2026 18:05:58 +0200 Subject: [PATCH 10/10] Fixed gift members unable to redeem or continue when tier is archived (#27561) closes https://linear.app/ghost/issue/BER-3566 We validate that a tier is active at the time a gift is purchased. After purchase, gifts should remain redeemable even if the tier is later archived, since they have already been paid for. This change allows gifts to be redeemed on an archived tier. 
Additionally, gift members can currently only continue their subscription on the same tier/cadence so remaining gift time can be carried over as trial days. If that tier has since been archived, this results in a checkout error, as we don't allow paid subscriptions on archived tiers. With this change, the UI renders the standard upgrade flow when the gifted tier is no longer available, letting the member pick from current active tiers. The trade-off is that the remaining gift days can't be preserved in that case; archived tiers are expected to be an edge case in practice. ## What changed Portal: - New `isArchivedTier({member, site})` helper that follows the existing `isActiveOffer` pattern (a tier id missing from `site.products` means the tier has been archived). - The gift "Continue subscription" banner and the gift-specific Continue button on the account page are gated on the tier still being available; otherwise the regular Change button takes over and routes the member through the standard upgrade screen. Backend: - `MemberRepository.create` / `update` previously rejected any product that wasn't `active` with "Cannot use archived Tiers", which also blocked the gift-redemption path (creating a member on the gifted tier). The check is now skipped when `status === 'gift'`. Non-gift paths (comped, paid) keep the archived-tier block. - In `create()`, the products validation block was moved below the status defaulting block so the gift carve-out can read the resolved status. 
--- apps/portal/package.json | 2 +- .../continue-gift-subscription-banner.js | 6 +- .../components/paid-account-actions.js | 17 +- apps/portal/src/utils/helpers.js | 12 ++ .../paid-account-actions.test.js | 169 +++++++++++++++++- apps/portal/test/utils/helpers.test.js | 32 ++++ .../repositories/member-repository.js | 32 ++-- .../repositories/member-repository.test.js | 80 +++++++++ 8 files changed, 328 insertions(+), 22 deletions(-) diff --git a/apps/portal/package.json b/apps/portal/package.json index 0e7c33b7dd3..bfeb6c41f32 100644 --- a/apps/portal/package.json +++ b/apps/portal/package.json @@ -1,6 +1,6 @@ { "name": "@tryghost/portal", - "version": "2.68.17", + "version": "2.68.18", "license": "MIT", "repository": "https://github.com/TryGhost/Ghost", "author": "Ghost Foundation", diff --git a/apps/portal/src/components/pages/AccountHomePage/components/continue-gift-subscription-banner.js b/apps/portal/src/components/pages/AccountHomePage/components/continue-gift-subscription-banner.js index 2a57838c0f4..8af8b5585fd 100644 --- a/apps/portal/src/components/pages/AccountHomePage/components/continue-gift-subscription-banner.js +++ b/apps/portal/src/components/pages/AccountHomePage/components/continue-gift-subscription-banner.js @@ -1,12 +1,12 @@ import AppContext from '../../../../app-context'; import ActionButton from '../../../common/action-button'; -import {getSubscriptionExpiry, isGiftMember} from '../../../../utils/helpers'; +import {getSubscriptionExpiry, isArchivedTier, isGiftMember} from '../../../../utils/helpers'; import {useContext} from 'react'; const ContinueGiftSubscriptionBanner = () => { - const {member, doAction, action, brandColor} = useContext(AppContext); + const {member, site, doAction, action, brandColor} = useContext(AppContext); - if (!isGiftMember({member})) { + if (!isGiftMember({member}) || isArchivedTier({member, site})) { return null; } diff --git a/apps/portal/src/components/pages/AccountHomePage/components/paid-account-actions.js 
b/apps/portal/src/components/pages/AccountHomePage/components/paid-account-actions.js index 9b7dc75e123..88032927d79 100644 --- a/apps/portal/src/components/pages/AccountHomePage/components/paid-account-actions.js +++ b/apps/portal/src/components/pages/AccountHomePage/components/paid-account-actions.js @@ -1,5 +1,5 @@ import AppContext from '../../../../app-context'; -import {getSubscriptionExpiry, getMemberSubscription, getMemberTierName, hasMultipleProductsFeature, hasOnlyFreePlan, isComplimentaryMember, isGiftMember, isPaidMember, subscriptionHasFreeTrial} from '../../../../utils/helpers'; +import {getSubscriptionExpiry, getMemberSubscription, getMemberTierName, hasMultipleProductsFeature, hasOnlyFreePlan, isArchivedTier, isComplimentaryMember, isGiftMember, isPaidMember, subscriptionHasFreeTrial} from '../../../../utils/helpers'; import {getDateString} from '../../../../utils/date-time'; import {ReactComponent as GiftIcon} from '../../../../images/icons/gift.svg'; import {ReactComponent as LoaderIcon} from '../../../../images/icons/loader.svg'; @@ -97,10 +97,21 @@ }; const PlanUpdateButton = ({isPaid}) => { - if (hasOnlyFreePlan({site}) && !isPaid) { + const hasGiftSubscription = isGiftMember({member}); + const canContinueGiftSubscription = hasGiftSubscription && !isArchivedTier({member, site}); + + // If no paid tiers are available, hide the plan update button for: + // - Free members, as they have no paid plans to upgrade to + // - Gift members on archived tiers, as they have no paid plans to upgrade to + // + // In contrast, still render the button for: + // - Paid members so that they can adjust the cadence on their existing sub + // - Comped members so that they can contact publishers to make changes to their complimentary access + if (hasOnlyFreePlan({site}) && (!isPaid || (hasGiftSubscription && !canContinueGiftSubscription))) { return null; } - if (isGiftMember({member})) { + + if (canContinueGiftSubscription) { 
return (