Why is the generic type parameter inferred differently in the following toy experiment, depending on whether the generic function is called with a value whose type is built via interface extension or via an intersection type? The experiment has been simplified from a real-world example.
interface Base { b: number }
interface Extra { a: string }
interface Ext1 extends Extra { b: number }
type Ext2 = Base & Extra
// f takes a T & Extra and returns a function that takes a T as input
const f = <T extends Base>(inp: T & Extra): ((arg: T) => void) => {
return (arg: T) => console.log(inp.a + arg.b)
}
const x1: Ext1 = { a: "x1", b: 1 }
const x2: Ext2 = { a: "y1", b: 2 }
const f1 = f(x1) // T is inferred as Ext1
const f2 = f(x2) // T is inferred as Base, NOT Ext2 (why?)
const inp = { b: 3 }
// error Argument of type '{ b: number; }' is not assignable to parameter of type 'Ext1'. Property 'a' is missing in type '{ b: number; }' but required in type 'Ext1'.
const out1 = f1(inp)
// ok, since { b: 3 } is assignable to Base (the inferred T)
const out2 = f2(inp)
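For what it's worth, the two types seem structurally interchangeable, and explicitly supplying the type argument gives the behaviour I expected, so the difference looks like it comes from the inference step rather than from the types themselves. The sketch below is an extra check I appended to the same snippet (the names asExt1, asExt2, f2b, and out3 are just mine for this check):

// Ext1 and Ext2 are mutually assignable, i.e. structurally the same shape
const asExt2: Ext2 = x1 // ok
const asExt1: Ext1 = x2 // ok
// Forcing T = Ext2 explicitly restores the stricter parameter type
const f2b = f<Ext2>(x2)
// error (property 'a' is missing), matching the Ext1 case
const out3 = f2b(inp)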