adding untracked frontend and typescript files

parent 95fadd4264
commit 085d7c68ba
@@ -0,0 +1,118 @@
using System.Text.Json.Nodes;
using InnovEnergy.Lib.StatusApi;

namespace InnovEnergy.SaliMax.Log;

public static class JsonUtil
{
    public static JsonObject CreateDevice(this DeviceType deviceType, String name)
    {
        return new JsonObject
        {
            { "Name", name },
            { "Type", deviceType.ToString() }
        };
    }

    public static JsonObject AddAcConnection(this JsonObject json, Decimal frequency, IEnumerable<JsonNode> acPhases)
    {
        return json.AddAcConnection(frequency, acPhases.ToArray());
    }

    public static JsonObject AddAcConnection(this JsonObject json, Decimal frequency, params JsonNode[] acPhases)
    {
        return json
              .AddProp("Ac", new JsonArray(acPhases))
              .AddProp("Frequency", frequency);
    }

    public static JsonObject AddAlarms<T>(this JsonObject json, IEnumerable<T> alarms)
    {
        return json.AddProp("Alarms", alarms.ToJsonArray());
    }

    public static JsonObject AddWarnings<T>(this JsonObject json, IEnumerable<T> warnings)
    {
        return json.AddProp("Warnings", warnings.ToJsonArray());
    }

    public static JsonObject AddProp(this JsonObject json, String key, JsonNode? value)
    {
        json.Add(key, value);
        return json;
    }

    public static JsonObject AddDcConnection(this JsonObject json, Decimal current, Decimal voltage)
    {
        return json.AddProp("Dc", CreateDcPhase(current, voltage));
    }

    public static JsonObject AddDc48Connection(this JsonObject json, Decimal current, Decimal voltage)
    {
        return json.AddProp("Dc48", CreateDcPhase(current, voltage));
    }

    public static JsonObject CreateAcPhase(Decimal current, Decimal voltage, Decimal phi)
    {
        return new JsonObject
        {
            ["Current"] = current,
            ["Voltage"] = voltage,
            ["Phi"]     = phi,
        };
    }

    public static JsonObject CreateDcPhase(Decimal current, Decimal voltage)
    {
        return new JsonObject
        {
            ["Current"] = current,
            ["Voltage"] = voltage,
            ["Power"]   = current * voltage,
        };
    }

    public static Decimal Round3(this Decimal val)
    {
        return Decimal.Round(val, 3);
    }

    public static Decimal Round0(this Decimal val)
    {
        return Decimal.Round(val, 0);
    }

    public static JsonObject CreateBus(String left, String top, String bottom, String right, String name)
    {
        return new JsonObject
        {
            ["Name"]   = name,
            ["Left"]   = left,
            ["Top"]    = top,
            ["Bottom"] = bottom,
            ["Right"]  = right
        };
    }

    public static String Port(DeviceType dt, BusPort bp, Boolean display = true)
    {
        return $"{Enum.GetName(dt)}:{Enum.GetName(bp)}:{(display ? "show" : "hide")}";
    }

    public static JsonArray ToJsonArray<T>(this IEnumerable<T> things)
    {
        var jsonValues = things
                        .Select(t => t!.ToString())
                        .Select(t => JsonValue.Create(t))
                        .OfType<JsonNode>()
                        .ToArray();

        return new JsonArray(jsonValues);
    }
}
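A minimal usage sketch (not part of the commit; names and values are illustrative only) showing how the helpers above compose into a device record. It assumes "using InnovEnergy.SaliMax.Log;" and the StatusApi DeviceType enum:

    var gridJson = DeviceType.Grid
                  .CreateDevice("Grid Meter")                            // hypothetical device name
                  .AddAcConnection(50.0m,
                                   JsonUtil.CreateAcPhase(10.2m, 230.1m, 0.02m),
                                   JsonUtil.CreateAcPhase( 9.8m, 229.7m, 0.03m),
                                   JsonUtil.CreateAcPhase(10.0m, 230.4m, 0.01m))
                  .AddAlarms(new[] { "GridLost" })                       // example alarm code
                  .AddWarnings(Array.Empty<String>());

    Console.WriteLine(gridJson.ToJsonString());
    // {"Name":"Grid Meter","Type":"Grid","Ac":[...],"Frequency":50.0,"Alarms":["GridLost"],"Warnings":[]}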
@@ -0,0 +1,11 @@
using InnovEnergy.Lib.Utils;

namespace InnovEnergy.SaliMax;

public static class Utils
{
    public static Decimal Round3(this Decimal d)
    {
        return DecimalUtils.RoundToSignificantFigures(d, 3);
    }
}
@@ -0,0 +1,10 @@
using InnovEnergy.Lib.StatusApi.Phases;

namespace InnovEnergy.Lib.StatusApi.Connections;

public class DcConnection : Phase
{
    public static DcConnection Null { get; } = new DcConnection();

    public Decimal Power => Current * Voltage;
}
@@ -0,0 +1,10 @@
using InnovEnergy.Lib.StatusApi.Phases;

namespace InnovEnergy.Lib.StatusApi.Connections;

public class SinglePhaseAcConnection : AcPhase
{
    public Decimal Frequency { get; init; }

    public new static SinglePhaseAcConnection Null = new SinglePhaseAcConnection();
}
@@ -0,0 +1,19 @@
using InnovEnergy.Lib.StatusApi.Phases;

namespace InnovEnergy.Lib.StatusApi.Connections;

public class ThreePhaseAcConnection
{
    public static ThreePhaseAcConnection Null { get; } = new ThreePhaseAcConnection();

    public AcPhase L1 { get; init; } = AcPhase.Null;
    public AcPhase L2 { get; init; } = AcPhase.Null;
    public AcPhase L3 { get; init; } = AcPhase.Null;

    public Decimal Frequency { get; init; }

    public Decimal ApparentPower => L1.ApparentPower + L2.ApparentPower + L3.ApparentPower;
    public Decimal ReactivePower => L1.ReactivePower + L2.ReactivePower + L3.ReactivePower;
    public Decimal ActivePower   => L1.ActivePower   + L2.ActivePower   + L3.ActivePower;
}
@@ -0,0 +1,34 @@
namespace InnovEnergy.Lib.StatusApi;

/// SIGN CONVENTION
///
/// Voltages have to be measured/indicated so that they are guaranteed to never be negative.
/// In the case of AC this is accomplished by using the RMS measurement.
/// The sign convention of the current (and hence power, since the voltage is defined to be never negative)
/// depends on the type of the device.
/// If the device can only produce (e.g. PV) or only consume (e.g. loads),
/// then the current has to be 0 or positive.
/// If the device is a prosumer (e.g. inverter, battery, grid...),
/// then a positive sign denotes current (power) flow away from the grid (to the "right"),
/// and a negative sign denotes current (power) flow towards the grid (to the "left").

/// The currently known DeviceTypes, to be serialized as strings in JSON.
public enum DeviceType
{
    None,
    PvOnAcIn,
    PvOnAcOut,
    PvOnDc,
    Load,
    CriticalLoad,
    Battery,
    Grid,
    Inverter,
    AcInToAcOut,
    DcDc,
    DcLoad,
    Losses
}
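A small illustration of this sign convention (example values only, not from the commit), using the DcConnection type added earlier in this commit:

    // Battery (a prosumer): discharging towards the loads -> positive current/power,
    // charging from the grid side -> negative current/power.
    var discharging = new DcConnection { Voltage = 48m,  Current =  25m };  // Power = +1200 W
    var charging    = new DcConnection { Voltage = 48m,  Current = -25m };  // Power = -1200 W

    // PV (producer only): current must be 0 or positive by convention.
    var pvString    = new DcConnection { Voltage = 380m, Current =   8m };  // Power = +3040 W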
@@ -0,0 +1,8 @@
namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class Battery : DcDevice
{
    public Decimal Soc         { get; init; }
    public Decimal Temperature { get; init; }
}
@@ -0,0 +1,9 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class DcDcConverter : Device
{
    public DcConnection Left  { get; init; } = DcConnection.Null;
    public DcConnection Right { get; init; } = DcConnection.Null;
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class DcDevice : Device
{
    public DcConnection Dc { get; init; } = DcConnection.Null;
}
@@ -0,0 +1,17 @@
namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class Device
{
    public String DeviceType
    {
        get
        {
            var t = GetType();

            // walk up the inheritance chain until the nearest abstract base class is reached
            while (!t!.IsAbstract)
                t = t.BaseType;

            return t.Name;
        }
    }
}
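For illustration (a sketch with a hypothetical concrete class, not part of the commit): the property yields the name of the nearest abstract ancestor, which is the string that ends up describing the device.

    // Hypothetical concrete implementation elsewhere in the code base:
    public class PylontechBattery : Battery { }

    // new PylontechBattery().DeviceType  ->  "Battery"
    // (the loop skips the concrete class and stops at the first abstract base)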
@@ -0,0 +1,7 @@
namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class GridMeter : ThreePhaseAcDevice
{
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public interface IPvCoupledDevice
{
    IReadOnlyList<DcConnection> Strings { get; }
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class Mppt : DcDevice, IPvCoupledDevice
{
    public IReadOnlyList<DcConnection> Strings { get; init; } = ArraySegment<DcConnection>.Empty;
}
@@ -0,0 +1,9 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class SinglePhaseAcDevice : Device
{
    public SinglePhaseAcConnection Ac { get; init; } = SinglePhaseAcConnection.Null;
}
@@ -0,0 +1,11 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class SinglePhaseInverter
{
    public SinglePhaseAcConnection Ac { get; init; } = SinglePhaseAcConnection.Null;
    public DcConnection            Dc { get; init; } = DcConnection.Null;
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class SinglePhasePvInverter : SinglePhaseAcDevice, IPvCoupledDevice
{
    public IReadOnlyList<DcConnection> Strings { get; init; } = ArraySegment<DcConnection>.Empty;
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class ThreePhaseAcDevice : Device
{
    public ThreePhaseAcConnection Ac { get; init; } = ThreePhaseAcConnection.Null;
}
@@ -0,0 +1,11 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class ThreePhaseInverter : Device
{
    public ThreePhaseAcConnection Ac { get; init; } = ThreePhaseAcConnection.Null;
    public DcConnection           Dc { get; init; } = DcConnection.Null;
}
@@ -0,0 +1,8 @@
using InnovEnergy.Lib.StatusApi.Connections;

namespace InnovEnergy.Lib.StatusApi.Devices;

public abstract class ThreePhasePvInverter : ThreePhaseAcDevice, IPvCoupledDevice
{
    public IReadOnlyList<DcConnection> Strings { get; init; } = ArraySegment<DcConnection>.Empty;
}
@@ -0,0 +1,17 @@
using static DecimalMath.DecimalEx;

namespace InnovEnergy.Lib.StatusApi.Phases;

public class AcPhase : Phase
{
    public Decimal Phi { get; init; }

    public Decimal ApparentPower => Voltage * Current;
    public Decimal ReactivePower => ApparentPower * Sin(Phi);
    public Decimal ActivePower   => ApparentPower * PowerFactor;
    public Decimal PowerFactor   => Cos(Phi);

    internal AcPhase() {}

    public static AcPhase Null = new AcPhase();
}
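A short worked example of the formulas above (illustrative numbers; Phi is treated as radians, matching DecimalEx.Sin/Cos). SinglePhaseAcConnection is used because the AcPhase constructor is internal:

    var l1 = new SinglePhaseAcConnection { Voltage = 230m, Current = 10m, Phi = 0.2m, Frequency = 50m };

    // l1.ApparentPower  = 230 * 10            = 2300    VA
    // l1.ReactivePower  = 2300 * sin(0.2 rad) ~  457    var
    // l1.ActivePower    = 2300 * cos(0.2 rad) ~ 2254    W
    // l1.PowerFactor    = cos(0.2 rad)        ~    0.980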
@@ -0,0 +1,12 @@
namespace InnovEnergy.Lib.StatusApi.Phases;

/// A phase must have at least a known Voltage and Current.
/// For DC this is already enough.
/// For AC the values have to be in RMS (not amplitude or P2P).
/// Power can be inferred, P = UI.

public abstract class Phase
{
    public Decimal Voltage { get; init; } // U, non-negative
    public Decimal Current { get; init; } // I, sign depends on device type, see sign convention above
}
@@ -0,0 +1,32 @@
using DecimalMath;

namespace InnovEnergy.Lib.Utils;

public static class DecimalUtils
{
    public static Double RoundToSignificantFigures(this Double num, Int32 n)
    {
        if (num == 0)
            return 0;

        var d     = Math.Ceiling(Math.Log10(num < 0 ? -num : num));
        var power = n - (Int32)d;

        var magnitude = Math.Pow(10, power);
        var shifted   = Math.Round(num * magnitude);

        return shifted / magnitude;
    }

    public static Decimal RoundToSignificantFigures(this Decimal num, Int32 n)
    {
        if (num == 0)
            return 0;

        var d     = Math.Ceiling(DecimalEx.Log10(num < 0 ? -num : num));
        var power = n - (Int32)d;

        var magnitude = DecimalEx.Pow(10, power);
        var shifted   = Math.Round(num * magnitude);

        return shifted / magnitude;
    }
}
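Illustrative inputs and outputs for the Decimal overload above (assuming the referenced DecimalMath package):

    Console.WriteLine(1234.5678m.RoundToSignificantFigures(3)); // 1230
    Console.WriteLine(0.012345m.RoundToSignificantFigures(3));  // 0.0123
    Console.WriteLine((-98765m).RoundToSignificantFigures(2));  // -99000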
@@ -0,0 +1,66 @@
namespace InnovEnergy.Lib.Utils;

public class TextBlock
{
    public static TextBlock Empty { get; } = new TextBlock(Array.Empty<String>());

    public IReadOnlyList<String> Lines { get; }

    public Int32 Width  => Lines.FirstOrDefault()?.Length ?? 0;
    public Int32 Height => Lines.Count;

    private TextBlock(IReadOnlyList<String> lines) => Lines = lines;

    public static TextBlock AlignLeft   (params Object[] lines) => AlignHorizontal((l, w) => l.PadRight(w), lines);
    public static TextBlock AlignRight  (params Object[] lines) => AlignHorizontal((l, w) => l.PadLeft(w),  lines);
    public static TextBlock AlignHCenter(params Object[] lines) => AlignHorizontal((l, w) => l.PadLeft((w + l.Length) / 2).PadRight(w), lines);

    public static TextBlock AlignTop(params Object[] columns)
    {
        if (!columns.Any())
            return Empty;

        var cs     = columns.Select(GetLines).ToArray(columns.Length);
        var ws     = cs.Select(c => c.Count == 0 ? 0 : c.Max(l => l.Length)).ToArray(cs.Length);
        var height = cs.Max(c => c.Count);

        // The original body was left unfinished here; the following is a minimal completion
        // (assumed behavior): pad every column to the common height, pad each line to its
        // column's width, then join the columns row by row.
        var rows = Enumerable
                  .Range(0, height)
                  .Select(r => String.Concat(cs.Select((c, i) => (r < c.Count ? c[r] : "").PadRight(ws[i]))))
                  .ToArray(height);

        return new TextBlock(rows);
    }

    public static TextBlock HSpace(Int32 width)  => new TextBlock(new[] { "".PadRight(width) });
    public static TextBlock VSpace(Int32 height) => new TextBlock(Enumerable.Repeat("", height).ToArray(height));

    public static TextBlock Space(Int32 width, Int32 height)
    {
        return new TextBlock(Enumerable.Repeat("".PadRight(width), height).ToArray(height));
    }

    private static TextBlock AlignHorizontal(Func<String, Int32, String> alignLine, Object[] lines)
    {
        if (!lines.Any())
            return Empty;

        var strings = lines
                     .SelectMany(GetLines)
                     .ToList();

        var width = strings.Max(l => l.Length);

        var aligned = strings
                     .Select(l => alignLine(l, width))
                     .ToArray(strings.Count);

        return new TextBlock(aligned);
    }

    private static IReadOnlyList<String> GetLines(Object l)
    {
        return l is TextBlock tb
             ? tb.Lines
             : l.ToString()?.SplitLines() ?? new[] { "<null>" };
    }

    public override String ToString() => String.Join(Environment.NewLine, Lines);
}
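A small usage sketch (not part of the commit); the exact spacing shown assumes the minimal AlignTop completion above:

    var left  = TextBlock.AlignLeft ("Grid",    "230 V", "50 Hz");
    var right = TextBlock.AlignRight("Battery", "48 V",  "75 %");

    Console.WriteLine(TextBlock.AlignTop(left, TextBlock.HSpace(4), right));

    // Grid     Battery
    // 230 V       48 V
    // 50 Hz       75 %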
@@ -0,0 +1 @@
../asar/bin/asar.js
@@ -0,0 +1 @@
../atob/bin/atob.js
@@ -0,0 +1 @@
../browserslist/cli.js
@@ -0,0 +1 @@
../update-browserslist-db/cli.js
@@ -0,0 +1 @@
../coffeescript/bin/cake
@@ -0,0 +1 @@
../chrome-remote-interface/bin/client.js
@@ -0,0 +1 @@
../coffeescript/bin/coffee
@@ -0,0 +1 @@
../indent-string/cli.js
@@ -0,0 +1 @@
../is-ci/bin.js
@@ -0,0 +1 @@
../is-docker/cli.js
@@ -0,0 +1 @@
../is-podman/cli.js
@@ -0,0 +1 @@
../jsesc/bin/jsesc
@@ -0,0 +1 @@
../json5/lib/cli.js
@@ -0,0 +1 @@
../mime/cli.js
@@ -0,0 +1 @@
../mkdirp/bin/cmd.js
@@ -0,0 +1 @@
../mustache/bin/mustache
@@ -0,0 +1 @@
../nanoid/bin/nanoid.cjs
@@ -0,0 +1 @@
../which/bin/node-which
@@ -0,0 +1 @@
../@babel/parser/bin/babel-parser.js
@@ -0,0 +1 @@
../qrcode-terminal/bin/qrcode-terminal.js
@@ -0,0 +1 @@
../regjsparser/bin/parser
@@ -0,0 +1 @@
../repeating/cli.js
@@ -0,0 +1 @@
../resolve/bin/resolve
@@ -0,0 +1 @@
../rimraf/bin.js
@@ -0,0 +1 @@
../semver/bin/semver
@@ -0,0 +1 @@
../testcafe/bin/testcafe-with-v8-flag-filter.js
@@ -0,0 +1 @@
../tree-kill/cli.js
@@ -0,0 +1 @@
../typescript/bin/tsc
@@ -0,0 +1 @@
../typescript/bin/tsserver
@@ -0,0 +1 @@
../uuid/dist/bin/uuid
File diff suppressed because it is too large
@ -0,0 +1,202 @@
|
||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright 2019 Google LLC
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
|
@ -0,0 +1,218 @@
|
||||||
|
# @ampproject/remapping
|
||||||
|
|
||||||
|
> Remap sequential sourcemaps through transformations to point at the original source code
|
||||||
|
|
||||||
|
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
|
||||||
|
them to the original source locations. Think "my minified code, transformed with babel and bundled
|
||||||
|
with webpack", all pointing to the correct location in your original source code.
|
||||||
|
|
||||||
|
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
|
||||||
|
they only need to generate an output sourcemap. This greatly simplifies building custom
|
||||||
|
transformations (think a find-and-replace).
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npm install @ampproject/remapping
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
function remapping(
|
||||||
|
map: SourceMap | SourceMap[],
|
||||||
|
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
|
||||||
|
options?: { excludeContent: boolean, decodedMappings: boolean }
|
||||||
|
): SourceMap;
|
||||||
|
|
||||||
|
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
|
||||||
|
// "source" location (where child sources are resolved relative to, or the location of original
|
||||||
|
// source), and the ability to override the "content" of an original source for inclusion in the
|
||||||
|
// output sourcemap.
|
||||||
|
type LoaderContext = {
|
||||||
|
readonly importer: string;
|
||||||
|
readonly depth: number;
|
||||||
|
source: string;
|
||||||
|
content: string | null | undefined;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
|
||||||
|
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
|
||||||
|
a transformed file (it has a sourcemap associated with it), then the `loader` should return that
|
||||||
|
sourcemap. If not, the path will be treated as original, untransformed source code.
|
||||||
|
|
||||||
|
```js
|
||||||
|
// Babel transformed "helloworld.js" into "transformed.js"
|
||||||
|
const transformedMap = JSON.stringify({
|
||||||
|
file: 'transformed.js',
|
||||||
|
// 1st column of 2nd line of output file translates into the 1st source
|
||||||
|
// file, line 3, column 2
|
||||||
|
mappings: ';CAEE',
|
||||||
|
sources: ['helloworld.js'],
|
||||||
|
version: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Uglify minified "transformed.js" into "transformed.min.js"
|
||||||
|
const minifiedTransformedMap = JSON.stringify({
|
||||||
|
file: 'transformed.min.js',
|
||||||
|
// 0th column of 1st line of output file translates into the 1st source
|
||||||
|
// file, line 2, column 1.
|
||||||
|
mappings: 'AACC',
|
||||||
|
names: [],
|
||||||
|
sources: ['transformed.js'],
|
||||||
|
version: 3,
|
||||||
|
});
|
||||||
|
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
// The "transformed.js" file is an transformed file.
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// The root importer is empty.
|
||||||
|
console.assert(ctx.importer === '');
|
||||||
|
// The depth in the sourcemap tree we're currently loading.
|
||||||
|
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
|
||||||
|
console.assert(ctx.depth === 1);
|
||||||
|
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loader will be called to load transformedMap's source file pointers as well.
|
||||||
|
console.assert(file === 'helloworld.js');
|
||||||
|
// `transformed.js`'s sourcemap points into `helloworld.js`.
|
||||||
|
console.assert(ctx.importer === 'transformed.js');
|
||||||
|
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
|
||||||
|
console.assert(ctx.depth === 2);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// file: 'transpiled.min.js',
|
||||||
|
// mappings: 'AAEE',
|
||||||
|
// sources: ['helloworld.js'],
|
||||||
|
// version: 3,
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
In this example, `loader` will be called twice:
|
||||||
|
|
||||||
|
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
|
||||||
|
associated sourcemap for it (it's a transformed file, after all) so that sourcemap locations can
|
||||||
|
be traced through it into the source files it represents.
|
||||||
|
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
|
||||||
|
we return `null`.
|
||||||
|
|
||||||
|
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
|
||||||
|
you were to read the `mappings`, it says "0th column of the 1st output line points to the 1st
|
||||||
|
column of the 2nd line of the file `helloworld.js`".
|
||||||
|
|
||||||
|
### Multiple transformations of a file
|
||||||
|
|
||||||
|
As a convenience, if you have multiple single-source transformations of a file, you may pass an
|
||||||
|
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
|
||||||
|
changes the `importer` and `depth` of each call to our loader. So our above example could have been
|
||||||
|
written as:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
[minifiedTransformedMap, transformedMap],
|
||||||
|
() => null
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// file: 'transpiled.min.js',
|
||||||
|
// mappings: 'AAEE',
|
||||||
|
// sources: ['helloworld.js'],
|
||||||
|
// version: 3,
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
### Advanced control of the loading graph
|
||||||
|
|
||||||
|
#### `source`
|
||||||
|
|
||||||
|
The `source` property can be overridden to any value to change the location of the current load. Eg,
|
||||||
|
for an original source file, it allows us to change the location to the original source regardless
|
||||||
|
of what the sourcemap source entry says. And for transformed files, it allows us to change the
|
||||||
|
relative resolving location for child sources of the loaded sourcemap.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
|
||||||
|
// source files are loaded, they will now be relative to `src/`.
|
||||||
|
ctx.source = 'src/transformed.js';
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.assert(file === 'src/helloworld.js');
|
||||||
|
// We could further change the source of this original file, eg, to be inside a nested directory
|
||||||
|
// itself. This will be reflected in the remapped sourcemap.
|
||||||
|
ctx.source = 'src/nested/transformed.js';
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// …,
|
||||||
|
// sources: ['src/nested/helloworld.js'],
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
#### `content`
|
||||||
|
|
||||||
|
The `content` property can be overridden when we encounter an original source file. Eg, this allows
|
||||||
|
you to manually provide the source content of the original file regardless of whether the
|
||||||
|
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
|
||||||
|
the source content.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const remapped = remapping(
|
||||||
|
minifiedTransformedMap,
|
||||||
|
(file, ctx) => {
|
||||||
|
|
||||||
|
if (file === 'transformed.js') {
|
||||||
|
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
|
||||||
|
// would not include any `sourcesContent` values.
|
||||||
|
return transformedMap;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.assert(file === 'helloworld.js');
|
||||||
|
// We can read the file to provide the source content.
|
||||||
|
ctx.content = fs.readFileSync(file, 'utf8');
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log(remapped);
|
||||||
|
// {
|
||||||
|
// …,
|
||||||
|
// sourcesContent: [
|
||||||
|
// 'console.log("Hello world!")',
|
||||||
|
// ],
|
||||||
|
// };
|
||||||
|
```
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
#### excludeContent
|
||||||
|
|
||||||
|
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
|
||||||
|
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
|
||||||
|
the size of the sourcemap.
|
||||||
|
|
||||||
|
#### decodedMappings
|
||||||
|
|
||||||
|
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
|
||||||
|
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
|
||||||
|
encoding into a VLQ string.
|
|
@ -0,0 +1,204 @@
|
||||||
|
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
||||||
|
import { GenMapping, addSegment, setSourceContent, decodedMap, encodedMap } from '@jridgewell/gen-mapping';
|
||||||
|
|
||||||
|
const SOURCELESS_MAPPING = {
|
||||||
|
source: null,
|
||||||
|
column: null,
|
||||||
|
line: null,
|
||||||
|
name: null,
|
||||||
|
content: null,
|
||||||
|
};
|
||||||
|
const EMPTY_SOURCES = [];
|
||||||
|
function Source(map, sources, source, content) {
|
||||||
|
return {
|
||||||
|
map,
|
||||||
|
sources,
|
||||||
|
source,
|
||||||
|
content,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||||
|
* (which may themselves be SourceMapTrees).
|
||||||
|
*/
|
||||||
|
function MapSource(map, sources) {
|
||||||
|
return Source(map, sources, '', null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||||
|
* segment tracing ends at the `OriginalSource`.
|
||||||
|
*/
|
||||||
|
function OriginalSource(source, content) {
|
||||||
|
return Source(null, EMPTY_SOURCES, source, content);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||||
|
* resolving each mapping in terms of the original source files.
|
||||||
|
*/
|
||||||
|
function traceMappings(tree) {
|
||||||
|
const gen = new GenMapping({ file: tree.map.file });
|
||||||
|
const { sources: rootSources, map } = tree;
|
||||||
|
const rootNames = map.names;
|
||||||
|
const rootMappings = decodedMappings(map);
|
||||||
|
for (let i = 0; i < rootMappings.length; i++) {
|
||||||
|
const segments = rootMappings[i];
|
||||||
|
let lastSource = null;
|
||||||
|
let lastSourceLine = null;
|
||||||
|
let lastSourceColumn = null;
|
||||||
|
for (let j = 0; j < segments.length; j++) {
|
||||||
|
const segment = segments[j];
|
||||||
|
const genCol = segment[0];
|
||||||
|
let traced = SOURCELESS_MAPPING;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length !== 1) {
|
||||||
|
const source = rootSources[segment[1]];
|
||||||
|
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||||
|
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||||
|
// respective segment into an original source.
|
||||||
|
if (traced == null)
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// So we traced a segment down into its original source file. Now push a
|
||||||
|
// new segment pointing to this location.
|
||||||
|
const { column, line, name, content, source } = traced;
|
||||||
|
if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
lastSourceLine = line;
|
||||||
|
lastSourceColumn = column;
|
||||||
|
lastSource = source;
|
||||||
|
// Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
|
||||||
|
addSegment(gen, i, genCol, source, line, column, name);
|
||||||
|
if (content != null)
|
||||||
|
setSourceContent(gen, source, content);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return gen;
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
||||||
|
* child SourceMapTrees, until we find the original source map.
|
||||||
|
*/
|
||||||
|
function originalPositionFor(source, line, column, name) {
|
||||||
|
if (!source.map) {
|
||||||
|
return { column, line, name, source: source.source, content: source.content };
|
||||||
|
}
|
||||||
|
const segment = traceSegment(source.map, line, column);
|
||||||
|
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
||||||
|
if (segment == null)
|
||||||
|
return null;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length === 1)
|
||||||
|
return SOURCELESS_MAPPING;
|
||||||
|
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function asArray(value) {
|
||||||
|
if (Array.isArray(value))
|
||||||
|
return value;
|
||||||
|
return [value];
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* Recursively builds a tree structure out of sourcemap files, with each node
|
||||||
|
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
||||||
|
* `OriginalSource`s and `SourceMapTree`s.
|
||||||
|
*
|
||||||
|
* Every sourcemap is composed of a collection of source files and mappings
|
||||||
|
* into locations of those source files. When we generate a `SourceMapTree` for
|
||||||
|
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
||||||
|
* does not have an associated sourcemap, it is considered an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*/
|
||||||
|
function buildSourceMapTree(input, loader) {
|
||||||
|
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
||||||
|
const map = maps.pop();
|
||||||
|
for (let i = 0; i < maps.length; i++) {
|
||||||
|
if (maps[i].sources.length > 1) {
|
||||||
|
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
||||||
|
'Did you specify these with the most recent transformation maps first?');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let tree = build(map, loader, '', 0);
|
||||||
|
for (let i = maps.length - 1; i >= 0; i--) {
|
||||||
|
tree = MapSource(maps[i], [tree]);
|
||||||
|
}
|
||||||
|
return tree;
|
||||||
|
}
|
||||||
|
function build(map, loader, importer, importerDepth) {
|
||||||
|
const { resolvedSources, sourcesContent } = map;
|
||||||
|
const depth = importerDepth + 1;
|
||||||
|
const children = resolvedSources.map((sourceFile, i) => {
|
||||||
|
// The loading context gives the loader more information about why this file is being loaded
|
||||||
|
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
||||||
|
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
||||||
|
// an unmodified source file.
|
||||||
|
const ctx = {
|
||||||
|
importer,
|
||||||
|
depth,
|
||||||
|
source: sourceFile || '',
|
||||||
|
content: undefined,
|
||||||
|
};
|
||||||
|
// Use the provided loader callback to retrieve the file's sourcemap.
|
||||||
|
// TODO: We should eventually support async loading of sourcemap files.
|
||||||
|
const sourceMap = loader(ctx.source, ctx);
|
||||||
|
const { source, content } = ctx;
|
||||||
|
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
||||||
|
if (sourceMap)
|
||||||
|
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
||||||
|
// Else, it's an unmodified source file.
|
||||||
|
// The contents of this unmodified source file can be overridden via the loader context,
|
||||||
|
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
||||||
|
// the importing sourcemap's `sourcesContent` field.
|
||||||
|
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
||||||
|
return OriginalSource(source, sourceContent);
|
||||||
|
});
|
||||||
|
return MapSource(map, children);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
||||||
|
* provided to it.
|
||||||
|
*/
|
||||||
|
class SourceMap {
|
||||||
|
constructor(map, options) {
|
||||||
|
const out = options.decodedMappings ? decodedMap(map) : encodedMap(map);
|
||||||
|
this.version = out.version; // SourceMap spec says this should be first.
|
||||||
|
this.file = out.file;
|
||||||
|
this.mappings = out.mappings;
|
||||||
|
this.names = out.names;
|
||||||
|
this.sourceRoot = out.sourceRoot;
|
||||||
|
this.sources = out.sources;
|
||||||
|
if (!options.excludeContent) {
|
||||||
|
this.sourcesContent = out.sourcesContent;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
toString() {
|
||||||
|
return JSON.stringify(this);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Traces through all the mappings in the root sourcemap, through the sources
|
||||||
|
* (and their sourcemaps), all the way back to the original source location.
|
||||||
|
*
|
||||||
|
* `loader` will be called every time we encounter a source file. If it returns
|
||||||
|
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
||||||
|
* it returns a falsey value, that source file is treated as an original,
|
||||||
|
* unmodified source file.
|
||||||
|
*
|
||||||
|
* Pass `excludeContent` to exclude any self-containing source file content
|
||||||
|
* from the output sourcemap.
|
||||||
|
*
|
||||||
|
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
||||||
|
* VLQ encoded) mappings.
|
||||||
|
*/
|
||||||
|
function remapping(input, loader, options) {
|
||||||
|
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
||||||
|
const tree = buildSourceMapTree(input, loader);
|
||||||
|
return new SourceMap(traceMappings(tree), opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
export { remapping as default };
|
||||||
|
//# sourceMappingURL=remapping.mjs.map
|
File diff suppressed because one or more lines are too long
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js (generated, vendored, new file, 209 lines)
@ -0,0 +1,209 @@
|
||||||
|
(function (global, factory) {
|
||||||
|
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
||||||
|
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
||||||
|
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
||||||
|
})(this, (function (traceMapping, genMapping) { 'use strict';
|
||||||
|
|
||||||
|
const SOURCELESS_MAPPING = {
|
||||||
|
source: null,
|
||||||
|
column: null,
|
||||||
|
line: null,
|
||||||
|
name: null,
|
||||||
|
content: null,
|
||||||
|
};
|
||||||
|
const EMPTY_SOURCES = [];
|
||||||
|
function Source(map, sources, source, content) {
|
||||||
|
return {
|
||||||
|
map,
|
||||||
|
sources,
|
||||||
|
source,
|
||||||
|
content,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
||||||
|
* (which may themselves be SourceMapTrees).
|
||||||
|
*/
|
||||||
|
function MapSource(map, sources) {
|
||||||
|
return Source(map, sources, '', null);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
||||||
|
* segment tracing ends at the `OriginalSource`.
|
||||||
|
*/
|
||||||
|
function OriginalSource(source, content) {
|
||||||
|
return Source(null, EMPTY_SOURCES, source, content);
|
||||||
|
}
|
||||||
|
/**
|
||||||
|
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
||||||
|
* resolving each mapping in terms of the original source files.
|
||||||
|
*/
|
||||||
|
function traceMappings(tree) {
|
||||||
|
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
||||||
|
const { sources: rootSources, map } = tree;
|
||||||
|
const rootNames = map.names;
|
||||||
|
const rootMappings = traceMapping.decodedMappings(map);
|
||||||
|
for (let i = 0; i < rootMappings.length; i++) {
|
||||||
|
const segments = rootMappings[i];
|
||||||
|
let lastSource = null;
|
||||||
|
let lastSourceLine = null;
|
||||||
|
let lastSourceColumn = null;
|
||||||
|
for (let j = 0; j < segments.length; j++) {
|
||||||
|
const segment = segments[j];
|
||||||
|
const genCol = segment[0];
|
||||||
|
let traced = SOURCELESS_MAPPING;
|
||||||
|
// 1-length segments only move the current generated column, there's no source information
|
||||||
|
// to gather from it.
|
||||||
|
if (segment.length !== 1) {
|
||||||
|
const source = rootSources[segment[1]];
|
||||||
|
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
||||||
|
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
||||||
|
// respective segment into an original source.
|
||||||
|
if (traced == null)
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// So we traced a segment down into its original source file. Now push a
|
||||||
|
// new segment pointing to this location.
|
||||||
|
const { column, line, name, content, source } = traced;
|
||||||
|
            if (line === lastSourceLine && column === lastSourceColumn && source === lastSource) {
                continue;
            }
            lastSourceLine = line;
            lastSourceColumn = column;
            lastSource = source;
            // Sigh, TypeScript can't figure out source/line/column are either all null, or all non-null...
            genMapping.addSegment(gen, i, genCol, source, line, column, name);
            if (content != null)
                genMapping.setSourceContent(gen, source, content);
        }
    }
    return gen;
}
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
function originalPositionFor(source, line, column, name) {
    if (!source.map) {
        return { column, line, name, source: source.source, content: source.content };
    }
    const segment = traceMapping.traceSegment(source.map, line, column);
    // If we couldn't find a segment, then this doesn't exist in the sourcemap.
    if (segment == null)
        return null;
    // 1-length segments only move the current generated column, there's no source information
    // to gather from it.
    if (segment.length === 1)
        return SOURCELESS_MAPPING;
    return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}

function asArray(value) {
    if (Array.isArray(value))
        return value;
    return [value];
}
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
function buildSourceMapTree(input, loader) {
    const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
    const map = maps.pop();
    for (let i = 0; i < maps.length; i++) {
        if (maps[i].sources.length > 1) {
            throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
                'Did you specify these with the most recent transformation maps first?');
        }
    }
    let tree = build(map, loader, '', 0);
    for (let i = maps.length - 1; i >= 0; i--) {
        tree = MapSource(maps[i], [tree]);
    }
    return tree;
}
function build(map, loader, importer, importerDepth) {
    const { resolvedSources, sourcesContent } = map;
    const depth = importerDepth + 1;
    const children = resolvedSources.map((sourceFile, i) => {
        // The loading context gives the loader more information about why this file is being loaded
        // (eg, from which importer). It also allows the loader to override the location of the loaded
        // sourcemap/original source, or to override the content in the sourcesContent field if it's
        // an unmodified source file.
        const ctx = {
            importer,
            depth,
            source: sourceFile || '',
            content: undefined,
        };
        // Use the provided loader callback to retrieve the file's sourcemap.
        // TODO: We should eventually support async loading of sourcemap files.
        const sourceMap = loader(ctx.source, ctx);
        const { source, content } = ctx;
        // If there is a sourcemap, then we need to recurse into it to load its source files.
        if (sourceMap)
            return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
        // Else, it's an unmodified source file.
        // The contents of this unmodified source file can be overridden via the loader context,
        // allowing it to be explicitly null or a string. If it remains undefined, we fall back to
        // the importing sourcemap's `sourcesContent` field.
        const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
        return OriginalSource(source, sourceContent);
    });
    return MapSource(map, children);
}

/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
class SourceMap {
    constructor(map, options) {
        const out = options.decodedMappings ? genMapping.decodedMap(map) : genMapping.encodedMap(map);
        this.version = out.version; // SourceMap spec says this should be first.
        this.file = out.file;
        this.mappings = out.mappings;
        this.names = out.names;
        this.sourceRoot = out.sourceRoot;
        this.sources = out.sources;
        if (!options.excludeContent) {
            this.sourcesContent = out.sourcesContent;
        }
    }
    toString() {
        return JSON.stringify(this);
    }
}

/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
function remapping(input, loader, options) {
    const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
    const tree = buildSourceMapTree(input, loader);
    return new SourceMap(traceMappings(tree), opts);
}

return remapping;

}));
//# sourceMappingURL=remapping.umd.js.map
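A minimal sketch (not part of the vendored file) of how this UMD build's `remapping` export can be driven. The file names and the placeholder `'AAAA'` mappings below are made up for illustration only:

```ts
import remapping from '@ampproject/remapping';
import type { EncodedSourceMap } from '@ampproject/remapping';

// Hypothetical maps: minified.js was produced from transpiled.js, which was produced from source.ts.
const minifiedMap: EncodedSourceMap = {
  version: 3,
  file: 'minified.js',
  sources: ['transpiled.js'],
  names: [],
  mappings: 'AAAA', // placeholder single segment
};
const transpiledMap: EncodedSourceMap = {
  version: 3,
  file: 'transpiled.js',
  sources: ['source.ts'],
  names: [],
  mappings: 'AAAA',
};

// The loader is called once per source file; returning a falsey value marks the
// file as an original, unmodified source.
const remapped = remapping(minifiedMap, (file) =>
  file === 'transpiled.js' ? transpiledMap : null,
);

console.log(remapped.sources);  // ['source.ts'], mappings now point at the original file
console.log(String(remapped));  // toString() above serialises the SourceMap with JSON.stringify
```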
1
frontend/node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
14
frontend/node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
import type { MapSource as MapSourceType } from './source-map-tree';
import type { SourceMapInput, SourceMapLoader } from './types';
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Every sourcemap is composed of a collection of source files and mappings
 * into locations of those source files. When we generate a `SourceMapTree` for
 * the sourcemap, we attempt to load each source file's own sourcemap. If it
 * does not have an associated sourcemap, it is considered an original,
 * unmodified source file.
 */
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
19
frontend/node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
Normal file
@ -0,0 +1,19 @@
import SourceMap from './source-map';
import type { SourceMapInput, SourceMapLoader, Options } from './types';
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap.
 *
 * Pass `decodedMappings` to receive a SourceMap with decoded (instead of
 * VLQ encoded) mappings.
 */
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
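The `options` parameter in the declaration above accepts either a legacy boolean or an `Options` object. A small sketch of both forms; the `map` and `loader` placeholders are illustrative ambient declarations, not real inputs:

```ts
import remapping from '@ampproject/remapping';
import type { SourceMapInput, SourceMapLoader } from '@ampproject/remapping';

declare const map: SourceMapInput;     // placeholder input map
declare const loader: SourceMapLoader; // placeholder loader

// Boolean form: `true` is shorthand for { excludeContent: true }.
const withoutContent = remapping(map, loader, true);

// Options form: keep mappings as decoded segment arrays instead of a VLQ string.
const decoded = remapping(map, loader, { decodedMappings: true });

console.log(Array.isArray(decoded.mappings));      // true
console.log(typeof withoutContent.sourcesContent); // 'undefined', content excluded
```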
48
frontend/node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
Normal file
@ -0,0 +1,48 @@
import { GenMapping } from '@jridgewell/gen-mapping';
import type { TraceMap } from '@jridgewell/trace-mapping';
export declare type SourceMapSegmentObject = {
    column: number;
    line: number;
    name: string;
    source: string;
    content: string | null;
} | {
    column: null;
    line: null;
    name: null;
    source: null;
    content: null;
};
export declare type OriginalSource = {
    map: TraceMap;
    sources: Sources[];
    source: string;
    content: string | null;
};
export declare type MapSource = {
    map: TraceMap;
    sources: Sources[];
    source: string;
    content: string | null;
};
export declare type Sources = OriginalSource | MapSource;
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
export declare function OriginalSource(source: string, content: string | null): OriginalSource;
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
export declare function traceMappings(tree: MapSource): GenMapping;
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
17
frontend/node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
Normal file
@ -0,0 +1,17 @@
import type { GenMapping } from '@jridgewell/gen-mapping';
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
export default class SourceMap {
    file?: string | null;
    mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
    sourceRoot?: string;
    names: string[];
    sources: (string | null)[];
    sourcesContent?: (string | null)[];
    version: 3;
    constructor(map: GenMapping, options: Options);
    toString(): string;
}
@ -0,0 +1,14 @@
import type { SourceMapInput } from '@jridgewell/trace-mapping';
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
export type { SourceMapInput };
export declare type LoaderContext = {
    readonly importer: string;
    readonly depth: number;
    source: string;
    content: string | null | undefined;
};
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
export declare type Options = {
    excludeContent?: boolean;
    decodedMappings?: boolean;
};
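A hedged sketch of a `SourceMapLoader` that uses the `LoaderContext` fields declared above; the on-disk `<file>.map` layout and the `dist/bundle.js.map` entry point are assumptions for illustration:

```ts
import remapping from '@ampproject/remapping';
import type { SourceMapLoader } from '@ampproject/remapping';
import fs from 'node:fs';

const loader: SourceMapLoader = (file, ctx) => {
  // `importer` and `depth` are read-only context: who asked for this file and how deep we are.
  if (ctx.depth > 3) return null; // arbitrary illustrative cut-off

  const mapFile = `${file}.map`;
  if (fs.existsSync(mapFile)) {
    // A JSON string is an accepted SourceMapInput, so the raw file can be returned as-is.
    return fs.readFileSync(mapFile, 'utf8');
  }

  // No further sourcemap: optionally override the original file's content via the context.
  ctx.content = fs.existsSync(file) ? fs.readFileSync(file, 'utf8') : null;
  return null; // falsey return = treat as an original, unmodified source
};

// Hypothetical entry: remap the final bundle's map back through every intermediate map on disk.
const result = remapping(fs.readFileSync('dist/bundle.js.map', 'utf8'), loader);
console.log(result.sources);
```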
@ -0,0 +1,63 @@
{
  "name": "@ampproject/remapping",
  "version": "2.2.0",
  "description": "Remap sequential sourcemaps through transformations to point at the original source code",
  "keywords": [
    "source",
    "map",
    "remap"
  ],
  "main": "dist/remapping.umd.js",
  "module": "dist/remapping.mjs",
  "typings": "dist/types/remapping.d.ts",
  "files": [
    "dist"
  ],
  "author": "Justin Ridgewell <jridgewell@google.com>",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/ampproject/remapping.git"
  },
  "license": "Apache-2.0",
  "engines": {
    "node": ">=6.0.0"
  },
  "scripts": {
    "build": "run-s -n build:*",
    "build:rollup": "rollup -c rollup.config.js",
    "build:ts": "tsc --project tsconfig.build.json",
    "lint": "run-s -n lint:*",
    "lint:prettier": "npm run test:lint:prettier -- --write",
    "lint:ts": "npm run test:lint:ts -- --fix",
    "prebuild": "rm -rf dist",
    "prepublishOnly": "npm run preversion",
    "preversion": "run-s test build",
    "test": "run-s -n test:lint test:only",
    "test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
    "test:lint": "run-s -n test:lint:*",
    "test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
    "test:lint:ts": "eslint '{src,test}/**/*.ts'",
    "test:only": "jest --coverage",
    "test:watch": "jest --coverage --watch"
  },
  "devDependencies": {
    "@rollup/plugin-typescript": "8.3.2",
    "@types/jest": "27.4.1",
    "@typescript-eslint/eslint-plugin": "5.20.0",
    "@typescript-eslint/parser": "5.20.0",
    "eslint": "8.14.0",
    "eslint-config-prettier": "8.5.0",
    "jest": "27.5.1",
    "jest-config": "27.5.1",
    "npm-run-all": "4.1.5",
    "prettier": "2.6.2",
    "rollup": "2.70.2",
    "ts-jest": "27.1.4",
    "tslib": "2.4.0",
    "typescript": "4.6.3"
  },
  "dependencies": {
    "@jridgewell/gen-mapping": "^0.1.0",
    "@jridgewell/trace-mapping": "^0.3.9"
  }
}
@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,19 @@
# @babel/code-frame

> Generate errors that contain a code frame that point to source locations.

See our website [@babel/code-frame](https://babeljs.io/docs/en/babel-code-frame) for more information.

## Install

Using npm:

```sh
npm install --save-dev @babel/code-frame
```

or using yarn:

```sh
yarn add @babel/code-frame --dev
```
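The README above is terse; here is a small usage sketch of `codeFrameColumns` (the named export implemented in the `lib/index.js` file that follows). The snippet and message are made up for illustration:

```ts
import { codeFrameColumns } from '@babel/code-frame';

const rawLines = `class Foo {
  constructor() {
    console.log("hi");
  }
}`;

const frame = codeFrameColumns(
  rawLines,
  { start: { line: 3, column: 5 }, end: { line: 3, column: 16 } },
  { highlightCode: false, message: 'illustrative marker' },
);

console.log(frame);
// Prints lines 1-5 with a ">" gutter marker on line 3 and "^" carets under
// columns 5-16, followed by the message.
```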
|
@ -0,0 +1,163 @@
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", {
|
||||||
|
value: true
|
||||||
|
});
|
||||||
|
exports.codeFrameColumns = codeFrameColumns;
|
||||||
|
exports.default = _default;
|
||||||
|
|
||||||
|
var _highlight = require("@babel/highlight");
|
||||||
|
|
||||||
|
let deprecationWarningShown = false;
|
||||||
|
|
||||||
|
function getDefs(chalk) {
|
||||||
|
return {
|
||||||
|
gutter: chalk.grey,
|
||||||
|
marker: chalk.red.bold,
|
||||||
|
message: chalk.red.bold
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
|
||||||
|
|
||||||
|
function getMarkerLines(loc, source, opts) {
|
||||||
|
const startLoc = Object.assign({
|
||||||
|
column: 0,
|
||||||
|
line: -1
|
||||||
|
}, loc.start);
|
||||||
|
const endLoc = Object.assign({}, startLoc, loc.end);
|
||||||
|
const {
|
||||||
|
linesAbove = 2,
|
||||||
|
linesBelow = 3
|
||||||
|
} = opts || {};
|
||||||
|
const startLine = startLoc.line;
|
||||||
|
const startColumn = startLoc.column;
|
||||||
|
const endLine = endLoc.line;
|
||||||
|
const endColumn = endLoc.column;
|
||||||
|
let start = Math.max(startLine - (linesAbove + 1), 0);
|
||||||
|
let end = Math.min(source.length, endLine + linesBelow);
|
||||||
|
|
||||||
|
if (startLine === -1) {
|
||||||
|
start = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (endLine === -1) {
|
||||||
|
end = source.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
const lineDiff = endLine - startLine;
|
||||||
|
const markerLines = {};
|
||||||
|
|
||||||
|
if (lineDiff) {
|
||||||
|
for (let i = 0; i <= lineDiff; i++) {
|
||||||
|
const lineNumber = i + startLine;
|
||||||
|
|
||||||
|
if (!startColumn) {
|
||||||
|
markerLines[lineNumber] = true;
|
||||||
|
} else if (i === 0) {
|
||||||
|
const sourceLength = source[lineNumber - 1].length;
|
||||||
|
markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
|
||||||
|
} else if (i === lineDiff) {
|
||||||
|
markerLines[lineNumber] = [0, endColumn];
|
||||||
|
} else {
|
||||||
|
const sourceLength = source[lineNumber - i].length;
|
||||||
|
markerLines[lineNumber] = [0, sourceLength];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (startColumn === endColumn) {
|
||||||
|
if (startColumn) {
|
||||||
|
markerLines[startLine] = [startColumn, 0];
|
||||||
|
} else {
|
||||||
|
markerLines[startLine] = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
markerLines[startLine] = [startColumn, endColumn - startColumn];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
start,
|
||||||
|
end,
|
||||||
|
markerLines
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function codeFrameColumns(rawLines, loc, opts = {}) {
|
||||||
|
const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
|
||||||
|
const chalk = (0, _highlight.getChalk)(opts);
|
||||||
|
const defs = getDefs(chalk);
|
||||||
|
|
||||||
|
const maybeHighlight = (chalkFn, string) => {
|
||||||
|
return highlighted ? chalkFn(string) : string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const lines = rawLines.split(NEWLINE);
|
||||||
|
const {
|
||||||
|
start,
|
||||||
|
end,
|
||||||
|
markerLines
|
||||||
|
} = getMarkerLines(loc, lines, opts);
|
||||||
|
const hasColumns = loc.start && typeof loc.start.column === "number";
|
||||||
|
const numberMaxWidth = String(end).length;
|
||||||
|
const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
|
||||||
|
let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
|
||||||
|
const number = start + 1 + index;
|
||||||
|
const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
|
||||||
|
const gutter = ` ${paddedNumber} |`;
|
||||||
|
const hasMarker = markerLines[number];
|
||||||
|
const lastMarkerLine = !markerLines[number + 1];
|
||||||
|
|
||||||
|
if (hasMarker) {
|
||||||
|
let markerLine = "";
|
||||||
|
|
||||||
|
if (Array.isArray(hasMarker)) {
|
||||||
|
const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
|
||||||
|
const numberOfMarkers = hasMarker[1] || 1;
|
||||||
|
markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
|
||||||
|
|
||||||
|
if (lastMarkerLine && opts.message) {
|
||||||
|
markerLine += " " + maybeHighlight(defs.message, opts.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
|
||||||
|
} else {
|
||||||
|
return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ? ` ${line}` : ""}`;
|
||||||
|
}
|
||||||
|
}).join("\n");
|
||||||
|
|
||||||
|
if (opts.message && !hasColumns) {
|
||||||
|
frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (highlighted) {
|
||||||
|
return chalk.reset(frame);
|
||||||
|
} else {
|
||||||
|
return frame;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function _default(rawLines, lineNumber, colNumber, opts = {}) {
|
||||||
|
if (!deprecationWarningShown) {
|
||||||
|
deprecationWarningShown = true;
|
||||||
|
const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`.";
|
||||||
|
|
||||||
|
if (process.emitWarning) {
|
||||||
|
process.emitWarning(message, "DeprecationWarning");
|
||||||
|
} else {
|
||||||
|
const deprecationError = new Error(message);
|
||||||
|
deprecationError.name = "DeprecationWarning";
|
||||||
|
console.warn(new Error(message));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
colNumber = Math.max(colNumber, 0);
|
||||||
|
const location = {
|
||||||
|
start: {
|
||||||
|
column: colNumber,
|
||||||
|
line: lineNumber
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return codeFrameColumns(rawLines, location, opts);
|
||||||
|
}
|
|
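As the implementation above shows, the default export still accepts positional line/column arguments but emits a one-time DeprecationWarning and forwards to `codeFrameColumns`. A brief sketch of both call forms (the snippet is illustrative):

```ts
import codeFrame, { codeFrameColumns } from '@babel/code-frame';

const source = 'const x = ;';

// Deprecated positional form: warns once, then forwards to codeFrameColumns
// with { start: { line, column } }.
console.log(codeFrame(source, 1, 11, {}));

// Preferred form with an explicit location object.
console.log(codeFrameColumns(source, { start: { line: 1, column: 11 } }));
```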
@ -0,0 +1,30 @@
{
  "name": "@babel/code-frame",
  "version": "7.18.6",
  "description": "Generate errors that contain a code frame that point to source locations.",
  "author": "The Babel Team (https://babel.dev/team)",
  "homepage": "https://babel.dev/docs/en/next/babel-code-frame",
  "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen",
  "license": "MIT",
  "publishConfig": {
    "access": "public"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-code-frame"
  },
  "main": "./lib/index.js",
  "dependencies": {
    "@babel/highlight": "^7.18.6"
  },
  "devDependencies": {
    "@types/chalk": "^2.0.0",
    "chalk": "^2.0.0",
    "strip-ansi": "^4.0.0"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"
}
@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,19 @@
# @babel/compat-data

>

See our website [@babel/compat-data](https://babeljs.io/docs/en/babel-compat-data) for more information.

## Install

Using npm:

```sh
npm install --save @babel/compat-data
```

or using yarn:

```sh
yarn add @babel/compat-data
```
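The README above does not show usage; each entry point of this package is a one-line CommonJS proxy around a JSON table (see the files that follow). A hedged sketch, assuming a CommonJS/Node setup:

```ts
// Each subpath export re-exports one of the JSON tables added below.
const plugins = require('@babel/compat-data/plugins');
const nativeModules = require('@babel/compat-data/native-modules');

// Entries map a plugin/feature name to the first engine versions that support it natively.
console.log(plugins['transform-optional-chaining']); // e.g. { chrome: "91", firefox: "74", ... }
console.log(nativeModules['es6.module'].node);       // "13.2.0" per data/native-modules.json below
```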
@ -0,0 +1 @@
module.exports = require("./data/corejs2-built-ins.json");
1
frontend/node_modules/@babel/compat-data/corejs3-shipped-proposals.js
generated
vendored
Normal file
@ -0,0 +1 @@
module.exports = require("./data/corejs3-shipped-proposals.json");
1935
frontend/node_modules/@babel/compat-data/data/corejs2-built-ins.json
generated
vendored
Normal file
File diff suppressed because it is too large
5
frontend/node_modules/@babel/compat-data/data/corejs3-shipped-proposals.json
generated
vendored
Normal file
@ -0,0 +1,5 @@
[
  "esnext.global-this",
  "esnext.promise.all-settled",
  "esnext.string.match-all"
]
@ -0,0 +1,18 @@
{
  "es6.module": {
    "chrome": "61",
    "and_chr": "61",
    "edge": "16",
    "firefox": "60",
    "and_ff": "60",
    "node": "13.2.0",
    "opera": "48",
    "op_mob": "48",
    "safari": "10.1",
    "ios": "10.3",
    "samsung": "8.2",
    "android": "61",
    "electron": "2.0",
    "ios_saf": "10.3"
  }
}
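A hedged sketch of how the `es6.module` table above could be consulted. The helper below is illustrative only (it is not part of Babel) and does a plain numeric, segment-by-segment version comparison:

```ts
const nativeModules = require('@babel/compat-data/native-modules');

function supportsNativeModules(engine: string, version: string): boolean {
  const minimum: string | undefined = nativeModules['es6.module'][engine];
  if (!minimum) return false; // engine not listed: assume no native ESM support
  const have = version.split('.').map(Number);
  const need = minimum.split('.').map(Number);
  for (let i = 0; i < Math.max(have.length, need.length); i++) {
    const diff = (have[i] ?? 0) - (need[i] ?? 0);
    if (diff !== 0) return diff > 0;
  }
  return true; // versions are equal
}

console.log(supportsNativeModules('node', '14.0.0')); // true  (table minimum is 13.2.0)
console.log(supportsNativeModules('safari', '9'));    // false (table minimum is 10.1)
```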
25
frontend/node_modules/@babel/compat-data/data/overlapping-plugins.json
generated
vendored
Normal file
@ -0,0 +1,25 @@
{
  "transform-async-to-generator": [
    "bugfix/transform-async-arrows-in-class"
  ],
  "transform-parameters": [
    "bugfix/transform-edge-default-parameters",
    "bugfix/transform-safari-id-destructuring-collision-in-function-expression"
  ],
  "transform-function-name": [
    "bugfix/transform-edge-function-name"
  ],
  "transform-block-scoping": [
    "bugfix/transform-safari-block-shadowing",
    "bugfix/transform-safari-for-shadowing"
  ],
  "transform-template-literals": [
    "bugfix/transform-tagged-template-caching"
  ],
  "transform-optional-chaining": [
    "bugfix/transform-v8-spread-parameters-in-optional-chaining"
  ],
  "proposal-optional-chaining": [
    "bugfix/transform-v8-spread-parameters-in-optional-chaining"
  ]
}
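A hedged sketch of one way to consume the overlap table above: if the full transform is already in a plugin list, its listed bugfix plugins are redundant. The `dedupe` helper is illustrative, not Babel's own logic:

```ts
const overlapping: Record<string, string[]> = require('@babel/compat-data/overlapping-plugins');

function dedupe(plugins: string[]): string[] {
  const covered = new Set<string>();
  for (const name of plugins) {
    for (const bugfix of overlapping[name] ?? []) covered.add(bugfix);
  }
  return plugins.filter((name) => !covered.has(name));
}

console.log(dedupe([
  'transform-block-scoping',
  'bugfix/transform-safari-block-shadowing', // dropped: covered by transform-block-scoping
  'transform-arrow-functions',               // kept: no overlap entry
]));
```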
184
frontend/node_modules/@babel/compat-data/data/plugin-bugfixes.json
generated
vendored
Normal file
|
@ -0,0 +1,184 @@
|
||||||
|
{
|
||||||
|
"bugfix/transform-async-arrows-in-class": {
|
||||||
|
"chrome": "55",
|
||||||
|
"opera": "42",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "52",
|
||||||
|
"safari": "11",
|
||||||
|
"node": "7.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11",
|
||||||
|
"samsung": "6",
|
||||||
|
"electron": "1.6"
|
||||||
|
},
|
||||||
|
"bugfix/transform-edge-default-parameters": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "18",
|
||||||
|
"firefox": "52",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"bugfix/transform-edge-function-name": {
|
||||||
|
"chrome": "51",
|
||||||
|
"opera": "38",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.2"
|
||||||
|
},
|
||||||
|
"bugfix/transform-safari-block-shadowing": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "44",
|
||||||
|
"safari": "11",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "11",
|
||||||
|
"ios": "11",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"bugfix/transform-safari-for-shadowing": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "4",
|
||||||
|
"safari": "11",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "11",
|
||||||
|
"ios": "11",
|
||||||
|
"samsung": "5",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"bugfix/transform-safari-id-destructuring-collision-in-function-expression": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "2",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"bugfix/transform-tagged-template-caching": {
|
||||||
|
"chrome": "41",
|
||||||
|
"opera": "28",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "34",
|
||||||
|
"safari": "13",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13",
|
||||||
|
"samsung": "3.4",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "0.21"
|
||||||
|
},
|
||||||
|
"bugfix/transform-v8-spread-parameters-in-optional-chaining": {
|
||||||
|
"chrome": "91",
|
||||||
|
"opera": "77",
|
||||||
|
"edge": "91",
|
||||||
|
"firefox": "74",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "16.9",
|
||||||
|
"deno": "1.9",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "16",
|
||||||
|
"electron": "13.0"
|
||||||
|
},
|
||||||
|
"transform-optional-chaining": {
|
||||||
|
"chrome": "80",
|
||||||
|
"opera": "67",
|
||||||
|
"edge": "80",
|
||||||
|
"firefox": "74",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "14",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "13",
|
||||||
|
"electron": "8.0"
|
||||||
|
},
|
||||||
|
"proposal-optional-chaining": {
|
||||||
|
"chrome": "80",
|
||||||
|
"opera": "67",
|
||||||
|
"edge": "80",
|
||||||
|
"firefox": "74",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "14",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "13",
|
||||||
|
"electron": "8.0"
|
||||||
|
},
|
||||||
|
"transform-parameters": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"transform-async-to-generator": {
|
||||||
|
"chrome": "55",
|
||||||
|
"opera": "42",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "52",
|
||||||
|
"safari": "10.1",
|
||||||
|
"node": "7.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10.3",
|
||||||
|
"samsung": "6",
|
||||||
|
"electron": "1.6"
|
||||||
|
},
|
||||||
|
"transform-template-literals": {
|
||||||
|
"chrome": "41",
|
||||||
|
"opera": "28",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "34",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "3.4",
|
||||||
|
"electron": "0.21"
|
||||||
|
},
|
||||||
|
"transform-function-name": {
|
||||||
|
"chrome": "51",
|
||||||
|
"opera": "38",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.2"
|
||||||
|
},
|
||||||
|
"transform-block-scoping": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "51",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,691 @@
|
||||||
|
{
|
||||||
|
"transform-class-static-block": {
|
||||||
|
"chrome": "94",
|
||||||
|
"opera": "80",
|
||||||
|
"edge": "94",
|
||||||
|
"firefox": "93",
|
||||||
|
"node": "16.11",
|
||||||
|
"deno": "1.14",
|
||||||
|
"samsung": "17",
|
||||||
|
"electron": "15.0"
|
||||||
|
},
|
||||||
|
"proposal-class-static-block": {
|
||||||
|
"chrome": "94",
|
||||||
|
"opera": "80",
|
||||||
|
"edge": "94",
|
||||||
|
"firefox": "93",
|
||||||
|
"node": "16.11",
|
||||||
|
"deno": "1.14",
|
||||||
|
"samsung": "17",
|
||||||
|
"electron": "15.0"
|
||||||
|
},
|
||||||
|
"transform-private-property-in-object": {
|
||||||
|
"chrome": "91",
|
||||||
|
"opera": "77",
|
||||||
|
"edge": "91",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "15",
|
||||||
|
"node": "16.9",
|
||||||
|
"deno": "1.9",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "16",
|
||||||
|
"electron": "13.0"
|
||||||
|
},
|
||||||
|
"proposal-private-property-in-object": {
|
||||||
|
"chrome": "91",
|
||||||
|
"opera": "77",
|
||||||
|
"edge": "91",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "15",
|
||||||
|
"node": "16.9",
|
||||||
|
"deno": "1.9",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "16",
|
||||||
|
"electron": "13.0"
|
||||||
|
},
|
||||||
|
"transform-class-properties": {
|
||||||
|
"chrome": "74",
|
||||||
|
"opera": "62",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "14.1",
|
||||||
|
"node": "12",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "11",
|
||||||
|
"electron": "6.0"
|
||||||
|
},
|
||||||
|
"proposal-class-properties": {
|
||||||
|
"chrome": "74",
|
||||||
|
"opera": "62",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "14.1",
|
||||||
|
"node": "12",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "11",
|
||||||
|
"electron": "6.0"
|
||||||
|
},
|
||||||
|
"transform-private-methods": {
|
||||||
|
"chrome": "84",
|
||||||
|
"opera": "70",
|
||||||
|
"edge": "84",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "15",
|
||||||
|
"node": "14.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "14",
|
||||||
|
"electron": "10.0"
|
||||||
|
},
|
||||||
|
"proposal-private-methods": {
|
||||||
|
"chrome": "84",
|
||||||
|
"opera": "70",
|
||||||
|
"edge": "84",
|
||||||
|
"firefox": "90",
|
||||||
|
"safari": "15",
|
||||||
|
"node": "14.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "15",
|
||||||
|
"samsung": "14",
|
||||||
|
"electron": "10.0"
|
||||||
|
},
|
||||||
|
"transform-numeric-separator": {
|
||||||
|
"chrome": "75",
|
||||||
|
"opera": "62",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "70",
|
||||||
|
"safari": "13",
|
||||||
|
"node": "12.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13",
|
||||||
|
"samsung": "11",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "6.0"
|
||||||
|
},
|
||||||
|
"proposal-numeric-separator": {
|
||||||
|
"chrome": "75",
|
||||||
|
"opera": "62",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "70",
|
||||||
|
"safari": "13",
|
||||||
|
"node": "12.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13",
|
||||||
|
"samsung": "11",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "6.0"
|
||||||
|
},
|
||||||
|
"transform-logical-assignment-operators": {
|
||||||
|
"chrome": "85",
|
||||||
|
"opera": "71",
|
||||||
|
"edge": "85",
|
||||||
|
"firefox": "79",
|
||||||
|
"safari": "14",
|
||||||
|
"node": "15",
|
||||||
|
"deno": "1.2",
|
||||||
|
"ios": "14",
|
||||||
|
"samsung": "14",
|
||||||
|
"electron": "10.0"
|
||||||
|
},
|
||||||
|
"proposal-logical-assignment-operators": {
|
||||||
|
"chrome": "85",
|
||||||
|
"opera": "71",
|
||||||
|
"edge": "85",
|
||||||
|
"firefox": "79",
|
||||||
|
"safari": "14",
|
||||||
|
"node": "15",
|
||||||
|
"deno": "1.2",
|
||||||
|
"ios": "14",
|
||||||
|
"samsung": "14",
|
||||||
|
"electron": "10.0"
|
||||||
|
},
|
||||||
|
"transform-nullish-coalescing-operator": {
|
||||||
|
"chrome": "80",
|
||||||
|
"opera": "67",
|
||||||
|
"edge": "80",
|
||||||
|
"firefox": "72",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "14",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "13",
|
||||||
|
"electron": "8.0"
|
||||||
|
},
|
||||||
|
"proposal-nullish-coalescing-operator": {
|
||||||
|
"chrome": "80",
|
||||||
|
"opera": "67",
|
||||||
|
"edge": "80",
|
||||||
|
"firefox": "72",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "14",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "13",
|
||||||
|
"electron": "8.0"
|
||||||
|
},
|
||||||
|
"transform-optional-chaining": {
|
||||||
|
"chrome": "91",
|
||||||
|
"opera": "77",
|
||||||
|
"edge": "91",
|
||||||
|
"firefox": "74",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "16.9",
|
||||||
|
"deno": "1.9",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "16",
|
||||||
|
"electron": "13.0"
|
||||||
|
},
|
||||||
|
"proposal-optional-chaining": {
|
||||||
|
"chrome": "91",
|
||||||
|
"opera": "77",
|
||||||
|
"edge": "91",
|
||||||
|
"firefox": "74",
|
||||||
|
"safari": "13.1",
|
||||||
|
"node": "16.9",
|
||||||
|
"deno": "1.9",
|
||||||
|
"ios": "13.4",
|
||||||
|
"samsung": "16",
|
||||||
|
"electron": "13.0"
|
||||||
|
},
|
||||||
|
"transform-json-strings": {
|
||||||
|
"chrome": "66",
|
||||||
|
"opera": "53",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "62",
|
||||||
|
"safari": "12",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "12",
|
||||||
|
"samsung": "9",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"proposal-json-strings": {
|
||||||
|
"chrome": "66",
|
||||||
|
"opera": "53",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "62",
|
||||||
|
"safari": "12",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "12",
|
||||||
|
"samsung": "9",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-optional-catch-binding": {
|
||||||
|
"chrome": "66",
|
||||||
|
"opera": "53",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "58",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "9",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"proposal-optional-catch-binding": {
|
||||||
|
"chrome": "66",
|
||||||
|
"opera": "53",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "58",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "9",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-parameters": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "18",
|
||||||
|
"firefox": "53",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"transform-async-generator-functions": {
|
||||||
|
"chrome": "63",
|
||||||
|
"opera": "50",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "57",
|
||||||
|
"safari": "12",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "12",
|
||||||
|
"samsung": "8",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"proposal-async-generator-functions": {
|
||||||
|
"chrome": "63",
|
||||||
|
"opera": "50",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "57",
|
||||||
|
"safari": "12",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "12",
|
||||||
|
"samsung": "8",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-object-rest-spread": {
|
||||||
|
"chrome": "60",
|
||||||
|
"opera": "47",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "55",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "8.3",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "8",
|
||||||
|
"electron": "2.0"
|
||||||
|
},
|
||||||
|
"proposal-object-rest-spread": {
|
||||||
|
"chrome": "60",
|
||||||
|
"opera": "47",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "55",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "8.3",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "8",
|
||||||
|
"electron": "2.0"
|
||||||
|
},
|
||||||
|
"transform-dotall-regex": {
|
||||||
|
"chrome": "62",
|
||||||
|
"opera": "49",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "78",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "8.10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "8",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-unicode-property-regex": {
|
||||||
|
"chrome": "64",
|
||||||
|
"opera": "51",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "78",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "9",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"proposal-unicode-property-regex": {
|
||||||
|
"chrome": "64",
|
||||||
|
"opera": "51",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "78",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "9",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-named-capturing-groups-regex": {
|
||||||
|
"chrome": "64",
|
||||||
|
"opera": "51",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "78",
|
||||||
|
"safari": "11.1",
|
||||||
|
"node": "10",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11.3",
|
||||||
|
"samsung": "9",
|
||||||
|
"electron": "3.0"
|
||||||
|
},
|
||||||
|
"transform-async-to-generator": {
|
||||||
|
"chrome": "55",
|
||||||
|
"opera": "42",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "52",
|
||||||
|
"safari": "11",
|
||||||
|
"node": "7.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11",
|
||||||
|
"samsung": "6",
|
||||||
|
"electron": "1.6"
|
||||||
|
},
|
||||||
|
"transform-exponentiation-operator": {
|
||||||
|
"chrome": "52",
|
||||||
|
"opera": "39",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "52",
|
||||||
|
"safari": "10.1",
|
||||||
|
"node": "7",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10.3",
|
||||||
|
"samsung": "6",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "1.3"
|
||||||
|
},
|
||||||
|
"transform-template-literals": {
|
||||||
|
"chrome": "41",
|
||||||
|
"opera": "28",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "34",
|
||||||
|
"safari": "13",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "13",
|
||||||
|
"samsung": "3.4",
|
||||||
|
"electron": "0.21"
|
||||||
|
},
|
||||||
|
"transform-literals": {
|
||||||
|
"chrome": "44",
|
||||||
|
"opera": "31",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "4",
|
||||||
|
"electron": "0.30"
|
||||||
|
},
|
||||||
|
"transform-function-name": {
|
||||||
|
"chrome": "51",
|
||||||
|
"opera": "38",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.2"
|
||||||
|
},
|
||||||
|
"transform-arrow-functions": {
|
||||||
|
"chrome": "47",
|
||||||
|
"opera": "34",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "43",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.36"
|
||||||
|
},
|
||||||
|
"transform-block-scoped-functions": {
|
||||||
|
"chrome": "41",
|
||||||
|
"opera": "28",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "46",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "11",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "3.4",
|
||||||
|
"electron": "0.21"
|
||||||
|
},
|
||||||
|
"transform-classes": {
|
||||||
|
"chrome": "46",
|
||||||
|
"opera": "33",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "45",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.36"
|
||||||
|
},
|
||||||
|
"transform-object-super": {
|
||||||
|
"chrome": "46",
|
||||||
|
"opera": "33",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "45",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.36"
|
||||||
|
},
|
||||||
|
"transform-shorthand-properties": {
|
||||||
|
"chrome": "43",
|
||||||
|
"opera": "30",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "33",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "4",
|
||||||
|
"rhino": "1.7.14",
|
||||||
|
"electron": "0.27"
|
||||||
|
},
|
||||||
|
"transform-duplicate-keys": {
|
||||||
|
"chrome": "42",
|
||||||
|
"opera": "29",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "34",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "3.4",
|
||||||
|
"electron": "0.25"
|
||||||
|
},
|
||||||
|
"transform-computed-properties": {
|
||||||
|
"chrome": "44",
|
||||||
|
"opera": "31",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "34",
|
||||||
|
"safari": "7.1",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "8",
|
||||||
|
"samsung": "4",
|
||||||
|
"electron": "0.30"
|
||||||
|
},
|
||||||
|
"transform-for-of": {
|
||||||
|
"chrome": "51",
|
||||||
|
"opera": "38",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.2"
|
||||||
|
},
|
||||||
|
"transform-sticky-regex": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "3",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"transform-unicode-escapes": {
|
||||||
|
"chrome": "44",
|
||||||
|
"opera": "31",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "4",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "4",
|
||||||
|
"electron": "0.30"
|
||||||
|
},
|
||||||
|
"transform-unicode-regex": {
|
||||||
|
"chrome": "50",
|
||||||
|
"opera": "37",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "46",
|
||||||
|
"safari": "12",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "12",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.1"
|
||||||
|
},
|
||||||
|
"transform-spread": {
|
||||||
|
"chrome": "46",
|
||||||
|
"opera": "33",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "45",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.36"
|
||||||
|
},
|
||||||
|
"transform-destructuring": {
|
||||||
|
"chrome": "51",
|
||||||
|
"opera": "38",
|
||||||
|
"edge": "15",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6.5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.2"
|
||||||
|
},
|
||||||
|
"transform-block-scoping": {
|
||||||
|
"chrome": "49",
|
||||||
|
"opera": "36",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "51",
|
||||||
|
"safari": "11",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "11",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.37"
|
||||||
|
},
|
||||||
|
"transform-typeof-symbol": {
|
||||||
|
"chrome": "38",
|
||||||
|
"opera": "25",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "36",
|
||||||
|
"safari": "9",
|
||||||
|
"node": "0.12",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "9",
|
||||||
|
"samsung": "3",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.20"
|
||||||
|
},
|
||||||
|
"transform-new-target": {
|
||||||
|
"chrome": "46",
|
||||||
|
"opera": "33",
|
||||||
|
"edge": "14",
|
||||||
|
"firefox": "41",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "5",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "0.36"
|
||||||
|
},
|
||||||
|
"transform-regenerator": {
|
||||||
|
"chrome": "50",
|
||||||
|
"opera": "37",
|
||||||
|
"edge": "13",
|
||||||
|
"firefox": "53",
|
||||||
|
"safari": "10",
|
||||||
|
"node": "6",
|
||||||
|
"deno": "1",
|
||||||
|
"ios": "10",
|
||||||
|
"samsung": "5",
|
||||||
|
"electron": "1.1"
|
||||||
|
},
|
||||||
|
"transform-member-expression-literals": {
|
||||||
|
"chrome": "7",
|
||||||
|
"opera": "12",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "2",
|
||||||
|
"safari": "5.1",
|
||||||
|
"node": "0.4",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "9",
|
||||||
|
"android": "4",
|
||||||
|
"ios": "6",
|
||||||
|
"phantom": "1.9",
|
||||||
|
"samsung": "1",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.20"
|
||||||
|
},
|
||||||
|
"transform-property-literals": {
|
||||||
|
"chrome": "7",
|
||||||
|
"opera": "12",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "2",
|
||||||
|
"safari": "5.1",
|
||||||
|
"node": "0.4",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "9",
|
||||||
|
"android": "4",
|
||||||
|
"ios": "6",
|
||||||
|
"phantom": "1.9",
|
||||||
|
"samsung": "1",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.20"
|
||||||
|
},
|
||||||
|
"transform-reserved-words": {
|
||||||
|
"chrome": "13",
|
||||||
|
"opera": "10.50",
|
||||||
|
"edge": "12",
|
||||||
|
"firefox": "2",
|
||||||
|
"safari": "3.1",
|
||||||
|
"node": "0.6",
|
||||||
|
"deno": "1",
|
||||||
|
"ie": "9",
|
||||||
|
"android": "4.4",
|
||||||
|
"ios": "6",
|
||||||
|
"phantom": "1.9",
|
||||||
|
"samsung": "1",
|
||||||
|
"rhino": "1.7.13",
|
||||||
|
"electron": "0.20"
|
||||||
|
},
|
||||||
|
"transform-export-namespace-from": {
|
||||||
|
"chrome": "72",
|
||||||
|
"and_chr": "72",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "80",
|
||||||
|
"and_ff": "80",
|
||||||
|
"node": "13.2",
|
||||||
|
"opera": "60",
|
||||||
|
"op_mob": "51",
|
||||||
|
"samsung": "11.0",
|
||||||
|
"android": "72",
|
||||||
|
"electron": "5.0"
|
||||||
|
},
|
||||||
|
"proposal-export-namespace-from": {
|
||||||
|
"chrome": "72",
|
||||||
|
"and_chr": "72",
|
||||||
|
"edge": "79",
|
||||||
|
"firefox": "80",
|
||||||
|
"and_ff": "80",
|
||||||
|
"node": "13.2",
|
||||||
|
"opera": "60",
|
||||||
|
"op_mob": "51",
|
||||||
|
"samsung": "11.0",
|
||||||
|
"android": "72",
|
||||||
|
"electron": "5.0"
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1 @@
module.exports = require("./data/native-modules.json");
@ -0,0 +1 @@
module.exports = require("./data/overlapping-plugins.json");
@ -0,0 +1,40 @@
{
  "name": "@babel/compat-data",
  "version": "7.20.14",
  "author": "The Babel Team (https://babel.dev/team)",
  "license": "MIT",
  "description": "",
  "repository": {
    "type": "git",
    "url": "https://github.com/babel/babel.git",
    "directory": "packages/babel-compat-data"
  },
  "publishConfig": {
    "access": "public"
  },
  "exports": {
    "./plugins": "./plugins.js",
    "./native-modules": "./native-modules.js",
    "./corejs2-built-ins": "./corejs2-built-ins.js",
    "./corejs3-shipped-proposals": "./corejs3-shipped-proposals.js",
    "./overlapping-plugins": "./overlapping-plugins.js",
    "./plugin-bugfixes": "./plugin-bugfixes.js"
  },
  "scripts": {
    "build-data": "./scripts/download-compat-table.sh && node ./scripts/build-data.js && node ./scripts/build-modules-support.js && node ./scripts/build-bugfixes-targets.js"
  },
  "keywords": [
    "babel",
    "compat-table",
    "compat-data"
  ],
  "devDependencies": {
    "@mdn/browser-compat-data": "^4.0.10",
    "core-js-compat": "^3.25.1",
    "electron-to-chromium": "^1.4.248"
  },
  "engines": {
    "node": ">=6.9.0"
  },
  "type": "commonjs"
}
@ -0,0 +1 @@
module.exports = require("./data/plugin-bugfixes.json");
@ -0,0 +1 @@
module.exports = require("./data/plugins.json");
@ -0,0 +1,22 @@
MIT License

Copyright (c) 2014-present Sebastian McKenzie and other contributors

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@ -0,0 +1,19 @@
# @babel/core

> Babel compiler core.

See our website [@babel/core](https://babeljs.io/docs/en/babel-core) for more information or the [issues](https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+label%3A%22pkg%3A%20core%22+is%3Aopen) associated with this package.

## Install

Using npm:

```sh
npm install --save-dev @babel/core
```

or using yarn:

```sh
yarn add @babel/core --dev
```
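A minimal usage sketch for the package documented above; `@babel/preset-env` is assumed to be installed separately and is not part of this diff:

```ts
import { transformAsync } from '@babel/core';

async function main() {
  const result = await transformAsync('const add = (a, b) => a + b;', {
    filename: 'add.js',
    presets: ['@babel/preset-env'],
  });
  console.log(result?.code);
}

main().catch(console.error);
```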
@ -0,0 +1,29 @@
"use strict";

const babelP = import("./lib/index.js");

const functionNames = [
  "createConfigItem",
  "loadPartialConfig",
  "loadOptions",
  "transform",
  "transformFile",
  "transformFromAst",
  "parse",
];

for (const name of functionNames) {
  exports[`${name}Sync`] = function () {
    throw new Error(
      `"${name}Sync" is not supported when loading @babel/core using require()`
    );
  };
  exports[name] = function (...args) {
    babelP.then(babel => {
      babel[name](...args);
    });
  };
  exports[`${name}Async`] = function (...args) {
    return babelP.then(babel => babel[`${name}Async`](...args));
  };
}
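The proxy above maps each API name to three exports: the `*Sync` variants throw, while the base and `*Async` variants defer to the dynamically imported implementation. A sketch of the observable behaviour in a setup where `require('@babel/core')` resolves to this proxy (an assumption, since most installs load the CommonJS build directly):

```ts
const babel = require('@babel/core');

// Async variant: resolves through the `import("./lib/index.js")` promise above.
babel
  .transformAsync('1 + 1;', { filename: 'example.js' })
  .then((result: { code: string | null } | null) => console.log(result?.code));

// Sync variant: the proxy throws immediately.
try {
  babel.transformSync('1 + 1;', { filename: 'example.js' });
} catch (err) {
  console.error((err as Error).message);
  // "transformSync" is not supported when loading @babel/core using require()
}
```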
@ -0,0 +1,3 @@
0 && 0;

//# sourceMappingURL=cache-contexts.js.map
@ -0,0 +1 @@
{"version":3,"names":[],"sources":["../../src/config/cache-contexts.ts"],"sourcesContent":["import type { Targets } from \"@babel/helper-compilation-targets\";\n\nimport type { ConfigContext } from \"./config-chain\";\nimport type { CallerMetadata } from \"./validation/options\";\n\nexport type { ConfigContext as FullConfig };\n\nexport type FullPreset = {\n targets: Targets;\n} & ConfigContext;\nexport type FullPlugin = {\n assumptions: { [name: string]: boolean };\n} & FullPreset;\n\n// Context not including filename since it is used in places that cannot\n// process 'ignore'/'only' and other filename-based logic.\nexport type SimpleConfig = {\n envName: string;\n caller: CallerMetadata | undefined;\n};\nexport type SimplePreset = {\n targets: Targets;\n} & SimpleConfig;\nexport type SimplePlugin = {\n assumptions: {\n [name: string]: boolean;\n };\n} & SimplePreset;\n"],"mappings":""}
|
@ -0,0 +1,261 @@
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", {
|
||||||
|
value: true
|
||||||
|
});
|
||||||
|
exports.assertSimpleType = assertSimpleType;
|
||||||
|
exports.makeStrongCache = makeStrongCache;
|
||||||
|
exports.makeStrongCacheSync = makeStrongCacheSync;
|
||||||
|
exports.makeWeakCache = makeWeakCache;
|
||||||
|
exports.makeWeakCacheSync = makeWeakCacheSync;
|
||||||
|
function _gensync() {
|
||||||
|
const data = require("gensync");
|
||||||
|
_gensync = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
var _async = require("../gensync-utils/async");
|
||||||
|
var _util = require("./util");
|
||||||
|
const synchronize = gen => {
|
||||||
|
return _gensync()(gen).sync;
|
||||||
|
};
|
||||||
|
function* genTrue() {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function makeWeakCache(handler) {
|
||||||
|
return makeCachedFunction(WeakMap, handler);
|
||||||
|
}
|
||||||
|
function makeWeakCacheSync(handler) {
|
||||||
|
return synchronize(makeWeakCache(handler));
|
||||||
|
}
|
||||||
|
function makeStrongCache(handler) {
|
||||||
|
return makeCachedFunction(Map, handler);
|
||||||
|
}
|
||||||
|
function makeStrongCacheSync(handler) {
|
||||||
|
return synchronize(makeStrongCache(handler));
|
||||||
|
}
|
||||||
|
function makeCachedFunction(CallCache, handler) {
|
||||||
|
const callCacheSync = new CallCache();
|
||||||
|
const callCacheAsync = new CallCache();
|
||||||
|
const futureCache = new CallCache();
|
||||||
|
return function* cachedFunction(arg, data) {
|
||||||
|
const asyncContext = yield* (0, _async.isAsync)();
|
||||||
|
const callCache = asyncContext ? callCacheAsync : callCacheSync;
|
||||||
|
const cached = yield* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data);
|
||||||
|
if (cached.valid) return cached.value;
|
||||||
|
const cache = new CacheConfigurator(data);
|
||||||
|
const handlerResult = handler(arg, cache);
|
||||||
|
let finishLock;
|
||||||
|
let value;
|
||||||
|
if ((0, _util.isIterableIterator)(handlerResult)) {
|
||||||
|
value = yield* (0, _async.onFirstPause)(handlerResult, () => {
|
||||||
|
finishLock = setupAsyncLocks(cache, futureCache, arg);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
value = handlerResult;
|
||||||
|
}
|
||||||
|
updateFunctionCache(callCache, cache, arg, value);
|
||||||
|
if (finishLock) {
|
||||||
|
futureCache.delete(arg);
|
||||||
|
finishLock.release(value);
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function* getCachedValue(cache, arg, data) {
|
||||||
|
const cachedValue = cache.get(arg);
|
||||||
|
if (cachedValue) {
|
||||||
|
for (const {
|
||||||
|
value,
|
||||||
|
valid
|
||||||
|
} of cachedValue) {
|
||||||
|
if (yield* valid(data)) return {
|
||||||
|
valid: true,
|
||||||
|
value
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
valid: false,
|
||||||
|
value: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function* getCachedValueOrWait(asyncContext, callCache, futureCache, arg, data) {
|
||||||
|
const cached = yield* getCachedValue(callCache, arg, data);
|
||||||
|
if (cached.valid) {
|
||||||
|
return cached;
|
||||||
|
}
|
||||||
|
if (asyncContext) {
|
||||||
|
const cached = yield* getCachedValue(futureCache, arg, data);
|
||||||
|
if (cached.valid) {
|
||||||
|
const value = yield* (0, _async.waitFor)(cached.value.promise);
|
||||||
|
return {
|
||||||
|
valid: true,
|
||||||
|
value
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
valid: false,
|
||||||
|
value: null
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function setupAsyncLocks(config, futureCache, arg) {
|
||||||
|
const finishLock = new Lock();
|
||||||
|
updateFunctionCache(futureCache, config, arg, finishLock);
|
||||||
|
return finishLock;
|
||||||
|
}
|
||||||
|
function updateFunctionCache(cache, config, arg, value) {
|
||||||
|
if (!config.configured()) config.forever();
|
||||||
|
let cachedValue = cache.get(arg);
|
||||||
|
config.deactivate();
|
||||||
|
switch (config.mode()) {
|
||||||
|
case "forever":
|
||||||
|
cachedValue = [{
|
||||||
|
value,
|
||||||
|
valid: genTrue
|
||||||
|
}];
|
||||||
|
cache.set(arg, cachedValue);
|
||||||
|
break;
|
||||||
|
case "invalidate":
|
||||||
|
cachedValue = [{
|
||||||
|
value,
|
||||||
|
valid: config.validator()
|
||||||
|
}];
|
||||||
|
cache.set(arg, cachedValue);
|
||||||
|
break;
|
||||||
|
case "valid":
|
||||||
|
if (cachedValue) {
|
||||||
|
cachedValue.push({
|
||||||
|
value,
|
||||||
|
valid: config.validator()
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
cachedValue = [{
|
||||||
|
value,
|
||||||
|
valid: config.validator()
|
||||||
|
}];
|
||||||
|
cache.set(arg, cachedValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
class CacheConfigurator {
|
||||||
|
constructor(data) {
|
||||||
|
this._active = true;
|
||||||
|
this._never = false;
|
||||||
|
this._forever = false;
|
||||||
|
this._invalidate = false;
|
||||||
|
this._configured = false;
|
||||||
|
this._pairs = [];
|
||||||
|
this._data = void 0;
|
||||||
|
this._data = data;
|
||||||
|
}
|
||||||
|
simple() {
|
||||||
|
return makeSimpleConfigurator(this);
|
||||||
|
}
|
||||||
|
mode() {
|
||||||
|
if (this._never) return "never";
|
||||||
|
if (this._forever) return "forever";
|
||||||
|
if (this._invalidate) return "invalidate";
|
||||||
|
return "valid";
|
||||||
|
}
|
||||||
|
forever() {
|
||||||
|
if (!this._active) {
|
||||||
|
throw new Error("Cannot change caching after evaluation has completed.");
|
||||||
|
}
|
||||||
|
if (this._never) {
|
||||||
|
throw new Error("Caching has already been configured with .never()");
|
||||||
|
}
|
||||||
|
this._forever = true;
|
||||||
|
this._configured = true;
|
||||||
|
}
|
||||||
|
never() {
|
||||||
|
if (!this._active) {
|
||||||
|
throw new Error("Cannot change caching after evaluation has completed.");
|
||||||
|
}
|
||||||
|
if (this._forever) {
|
||||||
|
throw new Error("Caching has already been configured with .forever()");
|
||||||
|
}
|
||||||
|
this._never = true;
|
||||||
|
this._configured = true;
|
||||||
|
}
|
||||||
|
using(handler) {
|
||||||
|
if (!this._active) {
|
||||||
|
throw new Error("Cannot change caching after evaluation has completed.");
|
||||||
|
}
|
||||||
|
if (this._never || this._forever) {
|
||||||
|
throw new Error("Caching has already been configured with .never or .forever()");
|
||||||
|
}
|
||||||
|
this._configured = true;
|
||||||
|
const key = handler(this._data);
|
||||||
|
const fn = (0, _async.maybeAsync)(handler, `You appear to be using an async cache handler, but Babel has been called synchronously`);
|
||||||
|
if ((0, _async.isThenable)(key)) {
|
||||||
|
return key.then(key => {
|
||||||
|
this._pairs.push([key, fn]);
|
||||||
|
return key;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
this._pairs.push([key, fn]);
|
||||||
|
return key;
|
||||||
|
}
|
||||||
|
invalidate(handler) {
|
||||||
|
this._invalidate = true;
|
||||||
|
return this.using(handler);
|
||||||
|
}
|
||||||
|
validator() {
|
||||||
|
const pairs = this._pairs;
|
||||||
|
return function* (data) {
|
||||||
|
for (const [key, fn] of pairs) {
|
||||||
|
if (key !== (yield* fn(data))) return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
deactivate() {
|
||||||
|
this._active = false;
|
||||||
|
}
|
||||||
|
configured() {
|
||||||
|
return this._configured;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function makeSimpleConfigurator(cache) {
|
||||||
|
function cacheFn(val) {
|
||||||
|
if (typeof val === "boolean") {
|
||||||
|
if (val) cache.forever();else cache.never();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
return cache.using(() => assertSimpleType(val()));
|
||||||
|
}
|
||||||
|
cacheFn.forever = () => cache.forever();
|
||||||
|
cacheFn.never = () => cache.never();
|
||||||
|
cacheFn.using = cb => cache.using(() => assertSimpleType(cb()));
|
||||||
|
cacheFn.invalidate = cb => cache.invalidate(() => assertSimpleType(cb()));
|
||||||
|
return cacheFn;
|
||||||
|
}
|
||||||
|
function assertSimpleType(value) {
|
||||||
|
if ((0, _async.isThenable)(value)) {
|
||||||
|
throw new Error(`You appear to be using an async cache handler, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously handle your caching logic.`);
|
||||||
|
}
|
||||||
|
if (value != null && typeof value !== "string" && typeof value !== "boolean" && typeof value !== "number") {
|
||||||
|
throw new Error("Cache keys must be either string, boolean, number, null, or undefined.");
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
class Lock {
|
||||||
|
constructor() {
|
||||||
|
this.released = false;
|
||||||
|
this.promise = void 0;
|
||||||
|
this._resolve = void 0;
|
||||||
|
this.promise = new Promise(resolve => {
|
||||||
|
this._resolve = resolve;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
release(value) {
|
||||||
|
this.released = true;
|
||||||
|
this._resolve(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
0 && 0;
|
||||||
|
|
||||||
|
//# sourceMappingURL=caching.js.map
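caching.js is what ultimately backs the `api.cache` object handed to JavaScript config files: `CacheConfigurator.simple()` (via `makeSimpleConfigurator`) exposes the callable plus `.forever()`, `.never()`, `.using()` and `.invalidate()`. A hedged sketch of how a config file drives it; the cache key and the `compact` option are only illustrations:

```js
// babel.config.js — sketch of the simple cache API produced by makeSimpleConfigurator.
module.exports = function (api) {
  // Re-evaluates this config whenever the key (here a NODE_ENV check) changes.
  const isProd = api.cache(() => process.env.NODE_ENV === "production");

  // Equivalent explicit forms: api.cache(true) ~ api.cache.forever(),
  // api.cache(false) ~ api.cache.never(), api.cache.using(fn), api.cache.invalidate(fn).
  return {
    compact: isProd, // assumption: any valid Babel option works here
  };
};
```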
File diff suppressed because one or more lines are too long
@ -0,0 +1,469 @@
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", {
|
||||||
|
value: true
|
||||||
|
});
|
||||||
|
exports.buildPresetChain = buildPresetChain;
|
||||||
|
exports.buildPresetChainWalker = void 0;
|
||||||
|
exports.buildRootChain = buildRootChain;
|
||||||
|
function _path() {
|
||||||
|
const data = require("path");
|
||||||
|
_path = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
function _debug() {
|
||||||
|
const data = require("debug");
|
||||||
|
_debug = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
var _options = require("./validation/options");
|
||||||
|
var _patternToRegex = require("./pattern-to-regex");
|
||||||
|
var _printer = require("./printer");
|
||||||
|
var _rewriteStackTrace = require("../errors/rewrite-stack-trace");
|
||||||
|
var _configError = require("../errors/config-error");
|
||||||
|
var _files = require("./files");
|
||||||
|
var _caching = require("./caching");
|
||||||
|
var _configDescriptors = require("./config-descriptors");
|
||||||
|
const debug = _debug()("babel:config:config-chain");
|
||||||
|
function* buildPresetChain(arg, context) {
|
||||||
|
const chain = yield* buildPresetChainWalker(arg, context);
|
||||||
|
if (!chain) return null;
|
||||||
|
return {
|
||||||
|
plugins: dedupDescriptors(chain.plugins),
|
||||||
|
presets: dedupDescriptors(chain.presets),
|
||||||
|
options: chain.options.map(o => normalizeOptions(o)),
|
||||||
|
files: new Set()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const buildPresetChainWalker = makeChainWalker({
|
||||||
|
root: preset => loadPresetDescriptors(preset),
|
||||||
|
env: (preset, envName) => loadPresetEnvDescriptors(preset)(envName),
|
||||||
|
overrides: (preset, index) => loadPresetOverridesDescriptors(preset)(index),
|
||||||
|
overridesEnv: (preset, index, envName) => loadPresetOverridesEnvDescriptors(preset)(index)(envName),
|
||||||
|
createLogger: () => () => {}
|
||||||
|
});
|
||||||
|
exports.buildPresetChainWalker = buildPresetChainWalker;
|
||||||
|
const loadPresetDescriptors = (0, _caching.makeWeakCacheSync)(preset => buildRootDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors));
|
||||||
|
const loadPresetEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, envName)));
|
||||||
|
const loadPresetOverridesDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index)));
|
||||||
|
const loadPresetOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(preset => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(preset, preset.alias, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||||
|
function* buildRootChain(opts, context) {
|
||||||
|
let configReport, babelRcReport;
|
||||||
|
const programmaticLogger = new _printer.ConfigPrinter();
|
||||||
|
const programmaticChain = yield* loadProgrammaticChain({
|
||||||
|
options: opts,
|
||||||
|
dirname: context.cwd
|
||||||
|
}, context, undefined, programmaticLogger);
|
||||||
|
if (!programmaticChain) return null;
|
||||||
|
const programmaticReport = yield* programmaticLogger.output();
|
||||||
|
let configFile;
|
||||||
|
if (typeof opts.configFile === "string") {
|
||||||
|
configFile = yield* (0, _files.loadConfig)(opts.configFile, context.cwd, context.envName, context.caller);
|
||||||
|
} else if (opts.configFile !== false) {
|
||||||
|
configFile = yield* (0, _files.findRootConfig)(context.root, context.envName, context.caller);
|
||||||
|
}
|
||||||
|
let {
|
||||||
|
babelrc,
|
||||||
|
babelrcRoots
|
||||||
|
} = opts;
|
||||||
|
let babelrcRootsDirectory = context.cwd;
|
||||||
|
const configFileChain = emptyChain();
|
||||||
|
const configFileLogger = new _printer.ConfigPrinter();
|
||||||
|
if (configFile) {
|
||||||
|
const validatedFile = validateConfigFile(configFile);
|
||||||
|
const result = yield* loadFileChain(validatedFile, context, undefined, configFileLogger);
|
||||||
|
if (!result) return null;
|
||||||
|
configReport = yield* configFileLogger.output();
|
||||||
|
if (babelrc === undefined) {
|
||||||
|
babelrc = validatedFile.options.babelrc;
|
||||||
|
}
|
||||||
|
if (babelrcRoots === undefined) {
|
||||||
|
babelrcRootsDirectory = validatedFile.dirname;
|
||||||
|
babelrcRoots = validatedFile.options.babelrcRoots;
|
||||||
|
}
|
||||||
|
mergeChain(configFileChain, result);
|
||||||
|
}
|
||||||
|
let ignoreFile, babelrcFile;
|
||||||
|
let isIgnored = false;
|
||||||
|
const fileChain = emptyChain();
|
||||||
|
if ((babelrc === true || babelrc === undefined) && typeof context.filename === "string") {
|
||||||
|
const pkgData = yield* (0, _files.findPackageData)(context.filename);
|
||||||
|
if (pkgData && babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory)) {
|
||||||
|
({
|
||||||
|
ignore: ignoreFile,
|
||||||
|
config: babelrcFile
|
||||||
|
} = yield* (0, _files.findRelativeConfig)(pkgData, context.envName, context.caller));
|
||||||
|
if (ignoreFile) {
|
||||||
|
fileChain.files.add(ignoreFile.filepath);
|
||||||
|
}
|
||||||
|
if (ignoreFile && shouldIgnore(context, ignoreFile.ignore, null, ignoreFile.dirname)) {
|
||||||
|
isIgnored = true;
|
||||||
|
}
|
||||||
|
if (babelrcFile && !isIgnored) {
|
||||||
|
const validatedFile = validateBabelrcFile(babelrcFile);
|
||||||
|
const babelrcLogger = new _printer.ConfigPrinter();
|
||||||
|
const result = yield* loadFileChain(validatedFile, context, undefined, babelrcLogger);
|
||||||
|
if (!result) {
|
||||||
|
isIgnored = true;
|
||||||
|
} else {
|
||||||
|
babelRcReport = yield* babelrcLogger.output();
|
||||||
|
mergeChain(fileChain, result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (babelrcFile && isIgnored) {
|
||||||
|
fileChain.files.add(babelrcFile.filepath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (context.showConfig) {
|
||||||
|
console.log(`Babel configs on "${context.filename}" (ascending priority):\n` + [configReport, babelRcReport, programmaticReport].filter(x => !!x).join("\n\n") + "\n-----End Babel configs-----");
|
||||||
|
}
|
||||||
|
const chain = mergeChain(mergeChain(mergeChain(emptyChain(), configFileChain), fileChain), programmaticChain);
|
||||||
|
return {
|
||||||
|
plugins: isIgnored ? [] : dedupDescriptors(chain.plugins),
|
||||||
|
presets: isIgnored ? [] : dedupDescriptors(chain.presets),
|
||||||
|
options: isIgnored ? [] : chain.options.map(o => normalizeOptions(o)),
|
||||||
|
fileHandling: isIgnored ? "ignored" : "transpile",
|
||||||
|
ignore: ignoreFile || undefined,
|
||||||
|
babelrc: babelrcFile || undefined,
|
||||||
|
config: configFile || undefined,
|
||||||
|
files: chain.files
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function babelrcLoadEnabled(context, pkgData, babelrcRoots, babelrcRootsDirectory) {
|
||||||
|
if (typeof babelrcRoots === "boolean") return babelrcRoots;
|
||||||
|
const absoluteRoot = context.root;
|
||||||
|
if (babelrcRoots === undefined) {
|
||||||
|
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||||
|
}
|
||||||
|
let babelrcPatterns = babelrcRoots;
|
||||||
|
if (!Array.isArray(babelrcPatterns)) {
|
||||||
|
babelrcPatterns = [babelrcPatterns];
|
||||||
|
}
|
||||||
|
babelrcPatterns = babelrcPatterns.map(pat => {
|
||||||
|
return typeof pat === "string" ? _path().resolve(babelrcRootsDirectory, pat) : pat;
|
||||||
|
});
|
||||||
|
if (babelrcPatterns.length === 1 && babelrcPatterns[0] === absoluteRoot) {
|
||||||
|
return pkgData.directories.indexOf(absoluteRoot) !== -1;
|
||||||
|
}
|
||||||
|
return babelrcPatterns.some(pat => {
|
||||||
|
if (typeof pat === "string") {
|
||||||
|
pat = (0, _patternToRegex.default)(pat, babelrcRootsDirectory);
|
||||||
|
}
|
||||||
|
return pkgData.directories.some(directory => {
|
||||||
|
return matchPattern(pat, babelrcRootsDirectory, directory, context);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
const validateConfigFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||||
|
filepath: file.filepath,
|
||||||
|
dirname: file.dirname,
|
||||||
|
options: (0, _options.validate)("configfile", file.options, file.filepath)
|
||||||
|
}));
|
||||||
|
const validateBabelrcFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||||
|
filepath: file.filepath,
|
||||||
|
dirname: file.dirname,
|
||||||
|
options: (0, _options.validate)("babelrcfile", file.options, file.filepath)
|
||||||
|
}));
|
||||||
|
const validateExtendFile = (0, _caching.makeWeakCacheSync)(file => ({
|
||||||
|
filepath: file.filepath,
|
||||||
|
dirname: file.dirname,
|
||||||
|
options: (0, _options.validate)("extendsfile", file.options, file.filepath)
|
||||||
|
}));
|
||||||
|
const loadProgrammaticChain = makeChainWalker({
|
||||||
|
root: input => buildRootDescriptors(input, "base", _configDescriptors.createCachedDescriptors),
|
||||||
|
env: (input, envName) => buildEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, envName),
|
||||||
|
overrides: (input, index) => buildOverrideDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index),
|
||||||
|
overridesEnv: (input, index, envName) => buildOverrideEnvDescriptors(input, "base", _configDescriptors.createCachedDescriptors, index, envName),
|
||||||
|
createLogger: (input, context, baseLogger) => buildProgrammaticLogger(input, context, baseLogger)
|
||||||
|
});
|
||||||
|
const loadFileChainWalker = makeChainWalker({
|
||||||
|
root: file => loadFileDescriptors(file),
|
||||||
|
env: (file, envName) => loadFileEnvDescriptors(file)(envName),
|
||||||
|
overrides: (file, index) => loadFileOverridesDescriptors(file)(index),
|
||||||
|
overridesEnv: (file, index, envName) => loadFileOverridesEnvDescriptors(file)(index)(envName),
|
||||||
|
createLogger: (file, context, baseLogger) => buildFileLogger(file.filepath, context, baseLogger)
|
||||||
|
});
|
||||||
|
function* loadFileChain(input, context, files, baseLogger) {
|
||||||
|
const chain = yield* loadFileChainWalker(input, context, files, baseLogger);
|
||||||
|
if (chain) {
|
||||||
|
chain.files.add(input.filepath);
|
||||||
|
}
|
||||||
|
return chain;
|
||||||
|
}
|
||||||
|
const loadFileDescriptors = (0, _caching.makeWeakCacheSync)(file => buildRootDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors));
|
||||||
|
const loadFileEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(envName => buildEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, envName)));
|
||||||
|
const loadFileOverridesDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => buildOverrideDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index)));
|
||||||
|
const loadFileOverridesEnvDescriptors = (0, _caching.makeWeakCacheSync)(file => (0, _caching.makeStrongCacheSync)(index => (0, _caching.makeStrongCacheSync)(envName => buildOverrideEnvDescriptors(file, file.filepath, _configDescriptors.createUncachedDescriptors, index, envName))));
|
||||||
|
function buildFileLogger(filepath, context, baseLogger) {
|
||||||
|
if (!baseLogger) {
|
||||||
|
return () => {};
|
||||||
|
}
|
||||||
|
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Config, {
|
||||||
|
filepath
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function buildRootDescriptors({
|
||||||
|
dirname,
|
||||||
|
options
|
||||||
|
}, alias, descriptors) {
|
||||||
|
return descriptors(dirname, options, alias);
|
||||||
|
}
|
||||||
|
function buildProgrammaticLogger(_, context, baseLogger) {
|
||||||
|
var _context$caller;
|
||||||
|
if (!baseLogger) {
|
||||||
|
return () => {};
|
||||||
|
}
|
||||||
|
return baseLogger.configure(context.showConfig, _printer.ChainFormatter.Programmatic, {
|
||||||
|
callerName: (_context$caller = context.caller) == null ? void 0 : _context$caller.name
|
||||||
|
});
|
||||||
|
}
|
||||||
|
function buildEnvDescriptors({
|
||||||
|
dirname,
|
||||||
|
options
|
||||||
|
}, alias, descriptors, envName) {
|
||||||
|
const opts = options.env && options.env[envName];
|
||||||
|
return opts ? descriptors(dirname, opts, `${alias}.env["${envName}"]`) : null;
|
||||||
|
}
|
||||||
|
function buildOverrideDescriptors({
|
||||||
|
dirname,
|
||||||
|
options
|
||||||
|
}, alias, descriptors, index) {
|
||||||
|
const opts = options.overrides && options.overrides[index];
|
||||||
|
if (!opts) throw new Error("Assertion failure - missing override");
|
||||||
|
return descriptors(dirname, opts, `${alias}.overrides[${index}]`);
|
||||||
|
}
|
||||||
|
function buildOverrideEnvDescriptors({
|
||||||
|
dirname,
|
||||||
|
options
|
||||||
|
}, alias, descriptors, index, envName) {
|
||||||
|
const override = options.overrides && options.overrides[index];
|
||||||
|
if (!override) throw new Error("Assertion failure - missing override");
|
||||||
|
const opts = override.env && override.env[envName];
|
||||||
|
return opts ? descriptors(dirname, opts, `${alias}.overrides[${index}].env["${envName}"]`) : null;
|
||||||
|
}
|
||||||
|
function makeChainWalker({
|
||||||
|
root,
|
||||||
|
env,
|
||||||
|
overrides,
|
||||||
|
overridesEnv,
|
||||||
|
createLogger
|
||||||
|
}) {
|
||||||
|
return function* chainWalker(input, context, files = new Set(), baseLogger) {
|
||||||
|
const {
|
||||||
|
dirname
|
||||||
|
} = input;
|
||||||
|
const flattenedConfigs = [];
|
||||||
|
const rootOpts = root(input);
|
||||||
|
if (configIsApplicable(rootOpts, dirname, context, input.filepath)) {
|
||||||
|
flattenedConfigs.push({
|
||||||
|
config: rootOpts,
|
||||||
|
envName: undefined,
|
||||||
|
index: undefined
|
||||||
|
});
|
||||||
|
const envOpts = env(input, context.envName);
|
||||||
|
if (envOpts && configIsApplicable(envOpts, dirname, context, input.filepath)) {
|
||||||
|
flattenedConfigs.push({
|
||||||
|
config: envOpts,
|
||||||
|
envName: context.envName,
|
||||||
|
index: undefined
|
||||||
|
});
|
||||||
|
}
|
||||||
|
(rootOpts.options.overrides || []).forEach((_, index) => {
|
||||||
|
const overrideOps = overrides(input, index);
|
||||||
|
if (configIsApplicable(overrideOps, dirname, context, input.filepath)) {
|
||||||
|
flattenedConfigs.push({
|
||||||
|
config: overrideOps,
|
||||||
|
index,
|
||||||
|
envName: undefined
|
||||||
|
});
|
||||||
|
const overrideEnvOpts = overridesEnv(input, index, context.envName);
|
||||||
|
if (overrideEnvOpts && configIsApplicable(overrideEnvOpts, dirname, context, input.filepath)) {
|
||||||
|
flattenedConfigs.push({
|
||||||
|
config: overrideEnvOpts,
|
||||||
|
index,
|
||||||
|
envName: context.envName
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (flattenedConfigs.some(({
|
||||||
|
config: {
|
||||||
|
options: {
|
||||||
|
ignore,
|
||||||
|
only
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}) => shouldIgnore(context, ignore, only, dirname))) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const chain = emptyChain();
|
||||||
|
const logger = createLogger(input, context, baseLogger);
|
||||||
|
for (const {
|
||||||
|
config,
|
||||||
|
index,
|
||||||
|
envName
|
||||||
|
} of flattenedConfigs) {
|
||||||
|
if (!(yield* mergeExtendsChain(chain, config.options, dirname, context, files, baseLogger))) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
logger(config, index, envName);
|
||||||
|
yield* mergeChainOpts(chain, config);
|
||||||
|
}
|
||||||
|
return chain;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function* mergeExtendsChain(chain, opts, dirname, context, files, baseLogger) {
|
||||||
|
if (opts.extends === undefined) return true;
|
||||||
|
const file = yield* (0, _files.loadConfig)(opts.extends, dirname, context.envName, context.caller);
|
||||||
|
if (files.has(file)) {
|
||||||
|
throw new Error(`Configuration cycle detected loading ${file.filepath}.\n` + `File already loaded following the config chain:\n` + Array.from(files, file => ` - ${file.filepath}`).join("\n"));
|
||||||
|
}
|
||||||
|
files.add(file);
|
||||||
|
const fileChain = yield* loadFileChain(validateExtendFile(file), context, files, baseLogger);
|
||||||
|
files.delete(file);
|
||||||
|
if (!fileChain) return false;
|
||||||
|
mergeChain(chain, fileChain);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function mergeChain(target, source) {
|
||||||
|
target.options.push(...source.options);
|
||||||
|
target.plugins.push(...source.plugins);
|
||||||
|
target.presets.push(...source.presets);
|
||||||
|
for (const file of source.files) {
|
||||||
|
target.files.add(file);
|
||||||
|
}
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
function* mergeChainOpts(target, {
|
||||||
|
options,
|
||||||
|
plugins,
|
||||||
|
presets
|
||||||
|
}) {
|
||||||
|
target.options.push(options);
|
||||||
|
target.plugins.push(...(yield* plugins()));
|
||||||
|
target.presets.push(...(yield* presets()));
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
function emptyChain() {
|
||||||
|
return {
|
||||||
|
options: [],
|
||||||
|
presets: [],
|
||||||
|
plugins: [],
|
||||||
|
files: new Set()
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function normalizeOptions(opts) {
|
||||||
|
const options = Object.assign({}, opts);
|
||||||
|
delete options.extends;
|
||||||
|
delete options.env;
|
||||||
|
delete options.overrides;
|
||||||
|
delete options.plugins;
|
||||||
|
delete options.presets;
|
||||||
|
delete options.passPerPreset;
|
||||||
|
delete options.ignore;
|
||||||
|
delete options.only;
|
||||||
|
delete options.test;
|
||||||
|
delete options.include;
|
||||||
|
delete options.exclude;
|
||||||
|
if (Object.prototype.hasOwnProperty.call(options, "sourceMap")) {
|
||||||
|
options.sourceMaps = options.sourceMap;
|
||||||
|
delete options.sourceMap;
|
||||||
|
}
|
||||||
|
return options;
|
||||||
|
}
|
||||||
|
function dedupDescriptors(items) {
|
||||||
|
const map = new Map();
|
||||||
|
const descriptors = [];
|
||||||
|
for (const item of items) {
|
||||||
|
if (typeof item.value === "function") {
|
||||||
|
const fnKey = item.value;
|
||||||
|
let nameMap = map.get(fnKey);
|
||||||
|
if (!nameMap) {
|
||||||
|
nameMap = new Map();
|
||||||
|
map.set(fnKey, nameMap);
|
||||||
|
}
|
||||||
|
let desc = nameMap.get(item.name);
|
||||||
|
if (!desc) {
|
||||||
|
desc = {
|
||||||
|
value: item
|
||||||
|
};
|
||||||
|
descriptors.push(desc);
|
||||||
|
if (!item.ownPass) nameMap.set(item.name, desc);
|
||||||
|
} else {
|
||||||
|
desc.value = item;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
descriptors.push({
|
||||||
|
value: item
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return descriptors.reduce((acc, desc) => {
|
||||||
|
acc.push(desc.value);
|
||||||
|
return acc;
|
||||||
|
}, []);
|
||||||
|
}
|
||||||
|
function configIsApplicable({
|
||||||
|
options
|
||||||
|
}, dirname, context, configName) {
|
||||||
|
return (options.test === undefined || configFieldIsApplicable(context, options.test, dirname, configName)) && (options.include === undefined || configFieldIsApplicable(context, options.include, dirname, configName)) && (options.exclude === undefined || !configFieldIsApplicable(context, options.exclude, dirname, configName));
|
||||||
|
}
|
||||||
|
function configFieldIsApplicable(context, test, dirname, configName) {
|
||||||
|
const patterns = Array.isArray(test) ? test : [test];
|
||||||
|
return matchesPatterns(context, patterns, dirname, configName);
|
||||||
|
}
|
||||||
|
function ignoreListReplacer(_key, value) {
|
||||||
|
if (value instanceof RegExp) {
|
||||||
|
return String(value);
|
||||||
|
}
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
function shouldIgnore(context, ignore, only, dirname) {
|
||||||
|
if (ignore && matchesPatterns(context, ignore, dirname)) {
|
||||||
|
var _context$filename;
|
||||||
|
const message = `No config is applied to "${(_context$filename = context.filename) != null ? _context$filename : "(unknown)"}" because it matches one of \`ignore: ${JSON.stringify(ignore, ignoreListReplacer)}\` from "${dirname}"`;
|
||||||
|
debug(message);
|
||||||
|
if (context.showConfig) {
|
||||||
|
console.log(message);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (only && !matchesPatterns(context, only, dirname)) {
|
||||||
|
var _context$filename2;
|
||||||
|
const message = `No config is applied to "${(_context$filename2 = context.filename) != null ? _context$filename2 : "(unknown)"}" because it fails to match one of \`only: ${JSON.stringify(only, ignoreListReplacer)}\` from "${dirname}"`;
|
||||||
|
debug(message);
|
||||||
|
if (context.showConfig) {
|
||||||
|
console.log(message);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
function matchesPatterns(context, patterns, dirname, configName) {
|
||||||
|
return patterns.some(pattern => matchPattern(pattern, dirname, context.filename, context, configName));
|
||||||
|
}
|
||||||
|
function matchPattern(pattern, dirname, pathToTest, context, configName) {
|
||||||
|
if (typeof pattern === "function") {
|
||||||
|
return !!(0, _rewriteStackTrace.endHiddenCallStack)(pattern)(pathToTest, {
|
||||||
|
dirname,
|
||||||
|
envName: context.envName,
|
||||||
|
caller: context.caller
|
||||||
|
});
|
||||||
|
}
|
||||||
|
if (typeof pathToTest !== "string") {
|
||||||
|
throw new _configError.default(`Configuration contains string/RegExp pattern, but no filename was passed to Babel`, configName);
|
||||||
|
}
|
||||||
|
if (typeof pattern === "string") {
|
||||||
|
pattern = (0, _patternToRegex.default)(pattern, dirname);
|
||||||
|
}
|
||||||
|
return pattern.test(pathToTest);
|
||||||
|
}
|
||||||
|
0 && 0;
|
||||||
|
|
||||||
|
//# sourceMappingURL=config-chain.js.map
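config-chain.js merges the programmatic options, the root config file and any `.babelrc` into a single chain, expanding `env`- and `overrides`-scoped blocks and following `extends` with cycle detection. A hedged sketch of a config shape that exercises those branches; the plugin and preset names are placeholders, not files in this commit:

```js
// babel.config.js — sketch of the option shapes buildRootChain / makeChainWalker process.
module.exports = {
  presets: ["some-preset"], // placeholder
  env: {
    // merged only when context.envName === "test"
    test: { plugins: ["some-test-only-plugin"] },
  },
  overrides: [
    {
      test: "./legacy/**", // gated per file by configIsApplicable
      plugins: ["some-legacy-plugin"], // placeholder
    },
  ],
  // extends: "./shared-config.js", // would be resolved by mergeExtendsChain; cycles are rejected
};
```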
File diff suppressed because one or more lines are too long
189 frontend/node_modules/@babel/core/lib/config/config-descriptors.js generated vendored Normal file
@ -0,0 +1,189 @@
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", {
|
||||||
|
value: true
|
||||||
|
});
|
||||||
|
exports.createCachedDescriptors = createCachedDescriptors;
|
||||||
|
exports.createDescriptor = createDescriptor;
|
||||||
|
exports.createUncachedDescriptors = createUncachedDescriptors;
|
||||||
|
function _gensync() {
|
||||||
|
const data = require("gensync");
|
||||||
|
_gensync = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
var _functional = require("../gensync-utils/functional");
|
||||||
|
var _files = require("./files");
|
||||||
|
var _item = require("./item");
|
||||||
|
var _caching = require("./caching");
|
||||||
|
var _resolveTargets = require("./resolve-targets");
|
||||||
|
function isEqualDescriptor(a, b) {
|
||||||
|
return a.name === b.name && a.value === b.value && a.options === b.options && a.dirname === b.dirname && a.alias === b.alias && a.ownPass === b.ownPass && (a.file && a.file.request) === (b.file && b.file.request) && (a.file && a.file.resolved) === (b.file && b.file.resolved);
|
||||||
|
}
|
||||||
|
function* handlerOf(value) {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
function optionsWithResolvedBrowserslistConfigFile(options, dirname) {
|
||||||
|
if (typeof options.browserslistConfigFile === "string") {
|
||||||
|
options.browserslistConfigFile = (0, _resolveTargets.resolveBrowserslistConfigFile)(options.browserslistConfigFile, dirname);
|
||||||
|
}
|
||||||
|
return options;
|
||||||
|
}
|
||||||
|
function createCachedDescriptors(dirname, options, alias) {
|
||||||
|
const {
|
||||||
|
plugins,
|
||||||
|
presets,
|
||||||
|
passPerPreset
|
||||||
|
} = options;
|
||||||
|
return {
|
||||||
|
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
|
||||||
|
plugins: plugins ? () => createCachedPluginDescriptors(plugins, dirname)(alias) : () => handlerOf([]),
|
||||||
|
presets: presets ? () => createCachedPresetDescriptors(presets, dirname)(alias)(!!passPerPreset) : () => handlerOf([])
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function createUncachedDescriptors(dirname, options, alias) {
|
||||||
|
return {
|
||||||
|
options: optionsWithResolvedBrowserslistConfigFile(options, dirname),
|
||||||
|
plugins: (0, _functional.once)(() => createPluginDescriptors(options.plugins || [], dirname, alias)),
|
||||||
|
presets: (0, _functional.once)(() => createPresetDescriptors(options.presets || [], dirname, alias, !!options.passPerPreset))
|
||||||
|
};
|
||||||
|
}
|
||||||
|
const PRESET_DESCRIPTOR_CACHE = new WeakMap();
|
||||||
|
const createCachedPresetDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
|
||||||
|
const dirname = cache.using(dir => dir);
|
||||||
|
return (0, _caching.makeStrongCacheSync)(alias => (0, _caching.makeStrongCache)(function* (passPerPreset) {
|
||||||
|
const descriptors = yield* createPresetDescriptors(items, dirname, alias, passPerPreset);
|
||||||
|
return descriptors.map(desc => loadCachedDescriptor(PRESET_DESCRIPTOR_CACHE, desc));
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
const PLUGIN_DESCRIPTOR_CACHE = new WeakMap();
|
||||||
|
const createCachedPluginDescriptors = (0, _caching.makeWeakCacheSync)((items, cache) => {
|
||||||
|
const dirname = cache.using(dir => dir);
|
||||||
|
return (0, _caching.makeStrongCache)(function* (alias) {
|
||||||
|
const descriptors = yield* createPluginDescriptors(items, dirname, alias);
|
||||||
|
return descriptors.map(desc => loadCachedDescriptor(PLUGIN_DESCRIPTOR_CACHE, desc));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
const DEFAULT_OPTIONS = {};
|
||||||
|
function loadCachedDescriptor(cache, desc) {
|
||||||
|
const {
|
||||||
|
value,
|
||||||
|
options = DEFAULT_OPTIONS
|
||||||
|
} = desc;
|
||||||
|
if (options === false) return desc;
|
||||||
|
let cacheByOptions = cache.get(value);
|
||||||
|
if (!cacheByOptions) {
|
||||||
|
cacheByOptions = new WeakMap();
|
||||||
|
cache.set(value, cacheByOptions);
|
||||||
|
}
|
||||||
|
let possibilities = cacheByOptions.get(options);
|
||||||
|
if (!possibilities) {
|
||||||
|
possibilities = [];
|
||||||
|
cacheByOptions.set(options, possibilities);
|
||||||
|
}
|
||||||
|
if (possibilities.indexOf(desc) === -1) {
|
||||||
|
const matches = possibilities.filter(possibility => isEqualDescriptor(possibility, desc));
|
||||||
|
if (matches.length > 0) {
|
||||||
|
return matches[0];
|
||||||
|
}
|
||||||
|
possibilities.push(desc);
|
||||||
|
}
|
||||||
|
return desc;
|
||||||
|
}
|
||||||
|
function* createPresetDescriptors(items, dirname, alias, passPerPreset) {
|
||||||
|
return yield* createDescriptors("preset", items, dirname, alias, passPerPreset);
|
||||||
|
}
|
||||||
|
function* createPluginDescriptors(items, dirname, alias) {
|
||||||
|
return yield* createDescriptors("plugin", items, dirname, alias);
|
||||||
|
}
|
||||||
|
function* createDescriptors(type, items, dirname, alias, ownPass) {
|
||||||
|
const descriptors = yield* _gensync().all(items.map((item, index) => createDescriptor(item, dirname, {
|
||||||
|
type,
|
||||||
|
alias: `${alias}$${index}`,
|
||||||
|
ownPass: !!ownPass
|
||||||
|
})));
|
||||||
|
assertNoDuplicates(descriptors);
|
||||||
|
return descriptors;
|
||||||
|
}
|
||||||
|
function* createDescriptor(pair, dirname, {
|
||||||
|
type,
|
||||||
|
alias,
|
||||||
|
ownPass
|
||||||
|
}) {
|
||||||
|
const desc = (0, _item.getItemDescriptor)(pair);
|
||||||
|
if (desc) {
|
||||||
|
return desc;
|
||||||
|
}
|
||||||
|
let name;
|
||||||
|
let options;
|
||||||
|
let value = pair;
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
if (value.length === 3) {
|
||||||
|
[value, options, name] = value;
|
||||||
|
} else {
|
||||||
|
[value, options] = value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let file = undefined;
|
||||||
|
let filepath = null;
|
||||||
|
if (typeof value === "string") {
|
||||||
|
if (typeof type !== "string") {
|
||||||
|
throw new Error("To resolve a string-based item, the type of item must be given");
|
||||||
|
}
|
||||||
|
const resolver = type === "plugin" ? _files.loadPlugin : _files.loadPreset;
|
||||||
|
const request = value;
|
||||||
|
({
|
||||||
|
filepath,
|
||||||
|
value
|
||||||
|
} = yield* resolver(value, dirname));
|
||||||
|
file = {
|
||||||
|
request,
|
||||||
|
resolved: filepath
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (!value) {
|
||||||
|
throw new Error(`Unexpected falsy value: ${String(value)}`);
|
||||||
|
}
|
||||||
|
if (typeof value === "object" && value.__esModule) {
|
||||||
|
if (value.default) {
|
||||||
|
value = value.default;
|
||||||
|
} else {
|
||||||
|
throw new Error("Must export a default export when using ES6 modules.");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (typeof value !== "object" && typeof value !== "function") {
|
||||||
|
throw new Error(`Unsupported format: ${typeof value}. Expected an object or a function.`);
|
||||||
|
}
|
||||||
|
if (filepath !== null && typeof value === "object" && value) {
|
||||||
|
throw new Error(`Plugin/Preset files are not allowed to export objects, only functions. In ${filepath}`);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
name,
|
||||||
|
alias: filepath || alias,
|
||||||
|
value,
|
||||||
|
options,
|
||||||
|
dirname,
|
||||||
|
ownPass,
|
||||||
|
file
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function assertNoDuplicates(items) {
|
||||||
|
const map = new Map();
|
||||||
|
for (const item of items) {
|
||||||
|
if (typeof item.value !== "function") continue;
|
||||||
|
let nameMap = map.get(item.value);
|
||||||
|
if (!nameMap) {
|
||||||
|
nameMap = new Set();
|
||||||
|
map.set(item.value, nameMap);
|
||||||
|
}
|
||||||
|
if (nameMap.has(item.name)) {
|
||||||
|
const conflicts = items.filter(i => i.value === item.value);
|
||||||
|
throw new Error([`Duplicate plugin/preset detected.`, `If you'd like to use two separate instances of a plugin,`, `they need separate names, e.g.`, ``, ` plugins: [`, ` ['some-plugin', {}],`, ` ['some-plugin', {}, 'some unique name'],`, ` ]`, ``, `Duplicates detected are:`, `${JSON.stringify(conflicts, null, 2)}`].join("\n"));
|
||||||
|
}
|
||||||
|
nameMap.add(item.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
0 && 0;
|
||||||
|
|
||||||
|
//# sourceMappingURL=config-descriptors.js.map
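createDescriptor accepts a bare plugin, a `[plugin, options]` pair, or a `[plugin, options, name]` triple, and assertNoDuplicates rejects the same plugin value registered twice under one name. A sketch of the naming workaround its error message asks for; the plugin name is a placeholder:

```js
// babel.config.js — sketch: two instances of the same plugin need distinct names.
module.exports = {
  plugins: [
    ["some-plugin", { legacy: true }], // descriptor name stays undefined
    ["some-plugin", { legacy: false }, "some-plugin (second copy)"], // third element becomes the name
  ],
};
```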
1 frontend/node_modules/@babel/core/lib/config/config-descriptors.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
283 frontend/node_modules/@babel/core/lib/config/files/configuration.js generated vendored Normal file
@ -0,0 +1,283 @@
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
Object.defineProperty(exports, "__esModule", {
|
||||||
|
value: true
|
||||||
|
});
|
||||||
|
exports.ROOT_CONFIG_FILENAMES = void 0;
|
||||||
|
exports.findConfigUpwards = findConfigUpwards;
|
||||||
|
exports.findRelativeConfig = findRelativeConfig;
|
||||||
|
exports.findRootConfig = findRootConfig;
|
||||||
|
exports.loadConfig = loadConfig;
|
||||||
|
exports.resolveShowConfigPath = resolveShowConfigPath;
|
||||||
|
function _debug() {
|
||||||
|
const data = require("debug");
|
||||||
|
_debug = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
function _fs() {
|
||||||
|
const data = require("fs");
|
||||||
|
_fs = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
function _path() {
|
||||||
|
const data = require("path");
|
||||||
|
_path = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
function _json() {
|
||||||
|
const data = require("json5");
|
||||||
|
_json = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
function _gensync() {
|
||||||
|
const data = require("gensync");
|
||||||
|
_gensync = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
var _caching = require("../caching");
|
||||||
|
var _configApi = require("../helpers/config-api");
|
||||||
|
var _utils = require("./utils");
|
||||||
|
var _moduleTypes = require("./module-types");
|
||||||
|
var _patternToRegex = require("../pattern-to-regex");
|
||||||
|
var _configError = require("../../errors/config-error");
|
||||||
|
var fs = require("../../gensync-utils/fs");
|
||||||
|
function _module() {
|
||||||
|
const data = require("module");
|
||||||
|
_module = function () {
|
||||||
|
return data;
|
||||||
|
};
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
var _rewriteStackTrace = require("../../errors/rewrite-stack-trace");
|
||||||
|
const debug = _debug()("babel:config:loading:files:configuration");
|
||||||
|
const ROOT_CONFIG_FILENAMES = ["babel.config.js", "babel.config.cjs", "babel.config.mjs", "babel.config.json"];
|
||||||
|
exports.ROOT_CONFIG_FILENAMES = ROOT_CONFIG_FILENAMES;
|
||||||
|
const RELATIVE_CONFIG_FILENAMES = [".babelrc", ".babelrc.js", ".babelrc.cjs", ".babelrc.mjs", ".babelrc.json"];
|
||||||
|
const BABELIGNORE_FILENAME = ".babelignore";
|
||||||
|
const LOADING_CONFIGS = new Set();
|
||||||
|
const readConfigJS = (0, _caching.makeStrongCache)(function* readConfigJS(filepath, cache) {
|
||||||
|
if (!_fs().existsSync(filepath)) {
|
||||||
|
cache.never();
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (LOADING_CONFIGS.has(filepath)) {
|
||||||
|
cache.never();
|
||||||
|
debug("Auto-ignoring usage of config %o.", filepath);
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
dirname: _path().dirname(filepath),
|
||||||
|
options: {}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
let options;
|
||||||
|
try {
|
||||||
|
LOADING_CONFIGS.add(filepath);
|
||||||
|
options = yield* (0, _moduleTypes.default)(filepath, "You appear to be using a native ECMAScript module configuration " + "file, which is only supported when running Babel asynchronously.");
|
||||||
|
} finally {
|
||||||
|
LOADING_CONFIGS.delete(filepath);
|
||||||
|
}
|
||||||
|
let assertCache = false;
|
||||||
|
if (typeof options === "function") {
|
||||||
|
yield* [];
|
||||||
|
options = (0, _rewriteStackTrace.endHiddenCallStack)(options)((0, _configApi.makeConfigAPI)(cache));
|
||||||
|
assertCache = true;
|
||||||
|
}
|
||||||
|
if (!options || typeof options !== "object" || Array.isArray(options)) {
|
||||||
|
throw new _configError.default(`Configuration should be an exported JavaScript object.`, filepath);
|
||||||
|
}
|
||||||
|
if (typeof options.then === "function") {
|
||||||
|
throw new _configError.default(`You appear to be using an async configuration, ` + `which your current version of Babel does not support. ` + `We may add support for this in the future, ` + `but if you're on the most recent version of @babel/core and still ` + `seeing this error, then you'll need to synchronously return your config.`, filepath);
|
||||||
|
}
|
||||||
|
if (assertCache && !cache.configured()) throwConfigError(filepath);
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
dirname: _path().dirname(filepath),
|
||||||
|
options
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const packageToBabelConfig = (0, _caching.makeWeakCacheSync)(file => {
|
||||||
|
const babel = file.options["babel"];
|
||||||
|
if (typeof babel === "undefined") return null;
|
||||||
|
if (typeof babel !== "object" || Array.isArray(babel) || babel === null) {
|
||||||
|
throw new _configError.default(`.babel property must be an object`, file.filepath);
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
filepath: file.filepath,
|
||||||
|
dirname: file.dirname,
|
||||||
|
options: babel
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const readConfigJSON5 = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||||
|
let options;
|
||||||
|
try {
|
||||||
|
options = _json().parse(content);
|
||||||
|
} catch (err) {
|
||||||
|
throw new _configError.default(`Error while parsing config - ${err.message}`, filepath);
|
||||||
|
}
|
||||||
|
if (!options) throw new _configError.default(`No config detected`, filepath);
|
||||||
|
if (typeof options !== "object") {
|
||||||
|
throw new _configError.default(`Config returned typeof ${typeof options}`, filepath);
|
||||||
|
}
|
||||||
|
if (Array.isArray(options)) {
|
||||||
|
throw new _configError.default(`Expected config object but found array`, filepath);
|
||||||
|
}
|
||||||
|
delete options["$schema"];
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
dirname: _path().dirname(filepath),
|
||||||
|
options
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const readIgnoreConfig = (0, _utils.makeStaticFileCache)((filepath, content) => {
|
||||||
|
const ignoreDir = _path().dirname(filepath);
|
||||||
|
const ignorePatterns = content.split("\n").map(line => line.replace(/#(.*?)$/, "").trim()).filter(line => !!line);
|
||||||
|
for (const pattern of ignorePatterns) {
|
||||||
|
if (pattern[0] === "!") {
|
||||||
|
throw new _configError.default(`Negation of file paths is not supported.`, filepath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
dirname: _path().dirname(filepath),
|
||||||
|
ignore: ignorePatterns.map(pattern => (0, _patternToRegex.default)(pattern, ignoreDir))
|
||||||
|
};
|
||||||
|
});
|
||||||
|
function findConfigUpwards(rootDir) {
|
||||||
|
let dirname = rootDir;
|
||||||
|
for (;;) {
|
||||||
|
for (const filename of ROOT_CONFIG_FILENAMES) {
|
||||||
|
if (_fs().existsSync(_path().join(dirname, filename))) {
|
||||||
|
return dirname;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const nextDir = _path().dirname(dirname);
|
||||||
|
if (dirname === nextDir) break;
|
||||||
|
dirname = nextDir;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
function* findRelativeConfig(packageData, envName, caller) {
|
||||||
|
let config = null;
|
||||||
|
let ignore = null;
|
||||||
|
const dirname = _path().dirname(packageData.filepath);
|
||||||
|
for (const loc of packageData.directories) {
|
||||||
|
if (!config) {
|
||||||
|
var _packageData$pkg;
|
||||||
|
config = yield* loadOneConfig(RELATIVE_CONFIG_FILENAMES, loc, envName, caller, ((_packageData$pkg = packageData.pkg) == null ? void 0 : _packageData$pkg.dirname) === loc ? packageToBabelConfig(packageData.pkg) : null);
|
||||||
|
}
|
||||||
|
if (!ignore) {
|
||||||
|
const ignoreLoc = _path().join(loc, BABELIGNORE_FILENAME);
|
||||||
|
ignore = yield* readIgnoreConfig(ignoreLoc);
|
||||||
|
if (ignore) {
|
||||||
|
debug("Found ignore %o from %o.", ignore.filepath, dirname);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
config,
|
||||||
|
ignore
|
||||||
|
};
|
||||||
|
}
|
||||||
|
function findRootConfig(dirname, envName, caller) {
|
||||||
|
return loadOneConfig(ROOT_CONFIG_FILENAMES, dirname, envName, caller);
|
||||||
|
}
|
||||||
|
function* loadOneConfig(names, dirname, envName, caller, previousConfig = null) {
|
||||||
|
const configs = yield* _gensync().all(names.map(filename => readConfig(_path().join(dirname, filename), envName, caller)));
|
||||||
|
const config = configs.reduce((previousConfig, config) => {
|
||||||
|
if (config && previousConfig) {
|
||||||
|
throw new _configError.default(`Multiple configuration files found. Please remove one:\n` + ` - ${_path().basename(previousConfig.filepath)}\n` + ` - ${config.filepath}\n` + `from ${dirname}`);
|
||||||
|
}
|
||||||
|
return config || previousConfig;
|
||||||
|
}, previousConfig);
|
||||||
|
if (config) {
|
||||||
|
debug("Found configuration %o from %o.", config.filepath, dirname);
|
||||||
|
}
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
function* loadConfig(name, dirname, envName, caller) {
|
||||||
|
const filepath = (((v, w) => (v = v.split("."), w = w.split("."), +v[0] > +w[0] || v[0] == w[0] && +v[1] >= +w[1]))(process.versions.node, "8.9") ? require.resolve : (r, {
|
||||||
|
paths: [b]
|
||||||
|
}, M = require("module")) => {
|
||||||
|
let f = M._findPath(r, M._nodeModulePaths(b).concat(b));
|
||||||
|
if (f) return f;
|
||||||
|
f = new Error(`Cannot resolve module '${r}'`);
|
||||||
|
f.code = "MODULE_NOT_FOUND";
|
||||||
|
throw f;
|
||||||
|
})(name, {
|
||||||
|
paths: [dirname]
|
||||||
|
});
|
||||||
|
const conf = yield* readConfig(filepath, envName, caller);
|
||||||
|
if (!conf) {
|
||||||
|
throw new _configError.default(`Config file contains no configuration data`, filepath);
|
||||||
|
}
|
||||||
|
debug("Loaded config %o from %o.", name, dirname);
|
||||||
|
return conf;
|
||||||
|
}
|
||||||
|
function readConfig(filepath, envName, caller) {
|
||||||
|
const ext = _path().extname(filepath);
|
||||||
|
return ext === ".js" || ext === ".cjs" || ext === ".mjs" ? readConfigJS(filepath, {
|
||||||
|
envName,
|
||||||
|
caller
|
||||||
|
}) : readConfigJSON5(filepath);
|
||||||
|
}
|
||||||
|
function* resolveShowConfigPath(dirname) {
|
||||||
|
const targetPath = process.env.BABEL_SHOW_CONFIG_FOR;
|
||||||
|
if (targetPath != null) {
|
||||||
|
const absolutePath = _path().resolve(dirname, targetPath);
|
||||||
|
const stats = yield* fs.stat(absolutePath);
|
||||||
|
if (!stats.isFile()) {
|
||||||
|
throw new Error(`${absolutePath}: BABEL_SHOW_CONFIG_FOR must refer to a regular file, directories are not supported.`);
|
||||||
|
}
|
||||||
|
return absolutePath;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
function throwConfigError(filepath) {
|
||||||
|
throw new _configError.default(`\
|
||||||
|
Caching was left unconfigured. Babel's plugins, presets, and .babelrc.js files can be configured
|
||||||
|
for various types of caching, using the first param of their handler functions:
|
||||||
|
|
||||||
|
module.exports = function(api) {
|
||||||
|
// The API exposes the following:
|
||||||
|
|
||||||
|
// Cache the returned value forever and don't call this function again.
|
||||||
|
api.cache(true);
|
||||||
|
|
||||||
|
// Don't cache at all. Not recommended because it will be very slow.
|
||||||
|
api.cache(false);
|
||||||
|
|
||||||
|
// Cached based on the value of some function. If this function returns a value different from
|
||||||
|
// a previously-encountered value, the plugins will re-evaluate.
|
||||||
|
var env = api.cache(() => process.env.NODE_ENV);
|
||||||
|
|
||||||
|
// If testing for a specific env, we recommend specifics to avoid instantiating a plugin for
|
||||||
|
// any possible NODE_ENV value that might come up during plugin execution.
|
||||||
|
var isProd = api.cache(() => process.env.NODE_ENV === "production");
|
||||||
|
|
||||||
|
// .cache(fn) will perform a linear search though instances to find the matching plugin based
|
||||||
|
// based on previous instantiated plugins. If you want to recreate the plugin and discard the
|
||||||
|
// previous instance whenever something changes, you may use:
|
||||||
|
var isProd = api.cache.invalidate(() => process.env.NODE_ENV === "production");
|
||||||
|
|
||||||
|
// Note, we also expose the following more-verbose versions of the above examples:
|
||||||
|
api.cache.forever(); // api.cache(true)
|
||||||
|
api.cache.never(); // api.cache(false)
|
||||||
|
api.cache.using(fn); // api.cache(fn)
|
||||||
|
|
||||||
|
// Return the value that will be cached.
|
||||||
|
return { };
|
||||||
|
};`, filepath);
|
||||||
|
}
|
||||||
|
0 && 0;
|
||||||
|
|
||||||
|
//# sourceMappingURL=configuration.js.map
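configuration.js is the discovery layer: findRootConfig probes the ROOT_CONFIG_FILENAMES above, findRelativeConfig walks package directories for `.babelrc*` and `.babelignore`, and JavaScript configs must settle their caching before returning. A hedged sketch of inspecting what was discovered through `loadPartialConfigAsync` (exported by the CommonJS shim near the top of this diff); the filename is hypothetical, and the `config`/`babelrc` fields are assumed to mirror what buildRootChain returns:

```js
// Sketch: ask @babel/core which config files apply to a given source file.
const babel = require("@babel/core");

babel
  .loadPartialConfigAsync({ filename: "/project/src/example.js" }) // hypothetical path
  .then((partial) => {
    if (!partial) return;
    console.log(partial.config);  // root babel.config.* discovered, if any (assumed field)
    console.log(partial.babelrc); // relative .babelrc* discovered, if any (assumed field)
  });
```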
1 frontend/node_modules/@babel/core/lib/config/files/configuration.js.map generated vendored Normal file
File diff suppressed because one or more lines are too long
33 frontend/node_modules/@babel/core/lib/config/files/import-meta-resolve.js generated vendored Normal file
@ -0,0 +1,33 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = resolve;
function _module() {
  const data = require("module");
  _module = function () {
    return data;
  };
  return data;
}
var _importMetaResolve = require("../../vendor/import-meta-resolve");
function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } }
function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; }
let import_;
try {
  import_ = require("./import.cjs");
} catch (_unused) {}
const importMetaResolveP = import_ && process.execArgv.includes("--experimental-import-meta-resolve") ? import_("data:text/javascript,export default import.meta.resolve").then(m => m.default || _importMetaResolve.resolve, () => _importMetaResolve.resolve) : Promise.resolve(_importMetaResolve.resolve);
function resolve(_x, _x2) {
  return _resolve.apply(this, arguments);
}
function _resolve() {
  _resolve = _asyncToGenerator(function* (specifier, parent) {
    return (yield importMetaResolveP)(specifier, parent);
  });
  return _resolve.apply(this, arguments);
}
0 && 0;

//# sourceMappingURL=import-meta-resolve.js.map
1 frontend/node_modules/@babel/core/lib/config/files/import-meta-resolve.js.map generated vendored Normal file
@ -0,0 +1 @@
{"version":3,"names":["import_","require","importMetaResolveP","process","execArgv","includes","then","m","default","polyfill","Promise","resolve","specifier","parent"],"sources":["../../../src/config/files/import-meta-resolve.ts"],"sourcesContent":["import { createRequire } from \"module\";\nimport { resolve as polyfill } from \"../../vendor/import-meta-resolve\";\n\nconst require = createRequire(import.meta.url);\n\nlet import_;\ntry {\n // Node < 13.3 doesn't support import() syntax.\n import_ = require(\"./import.cjs\");\n} catch {}\n\n// import.meta.resolve is only available in ESM, but this file is compiled to CJS.\n// We can extract it using dynamic import.\nconst importMetaResolveP: Promise<ImportMeta[\"resolve\"]> =\n import_ &&\n // Due to a Node.js/V8 bug (https://github.com/nodejs/node/issues/35889), we cannot\n // use always dynamic import because it segfaults when running in a Node.js `vm` context,\n // which is used by the default Jest environment and by webpack-cli.\n //\n // However, import.meta.resolve is experimental and only enabled when Node.js is run\n // with the `--experimental-import-meta-resolve` flag: we can avoid calling import()\n // when that flag is not enabled, so that the default behavior never segfaults.\n //\n // Hopefully, before Node.js unflags import.meta.resolve, either:\n // - we will move to ESM, so that we have direct access to import.meta.resolve, or\n // - the V8 bug will be fixed so that we can safely use dynamic import by default.\n //\n // I (@nicolo-ribaudo) am really anoyed by this bug, because there is no known\n // work-around other than \"don't use dynamic import if you are running in a `vm` context\",\n // but there is no reliable way to detect it (you cannot try/catch segfaults).\n //\n // This is the only place where we *need* to use dynamic import because we need to access\n // an ES module. All the other places will first try using require() and *then*, if\n // it throws because it's a module, will fallback to import().\n process.execArgv.includes(\"--experimental-import-meta-resolve\")\n ? import_(\"data:text/javascript,export default import.meta.resolve\").then(\n (m: { default: ImportMeta[\"resolve\"] | undefined }) =>\n m.default || polyfill,\n () => polyfill,\n )\n : Promise.resolve(polyfill);\n\nexport default async function resolve(\n specifier: Parameters<ImportMeta[\"resolve\"]>[0],\n parent?: Parameters<ImportMeta[\"resolve\"]>[1],\n): ReturnType<ImportMeta[\"resolve\"]> {\n return (await importMetaResolveP)(specifier, parent);\n}\n"],"mappings":";;;;;;AAAA;EAAA;EAAA;IAAA;EAAA;EAAA;AAAA;AACA;AAAuE;AAAA;AAIvE,IAAIA,OAAO;AACX,IAAI;EAEFA,OAAO,GAAGC,OAAO,CAAC,cAAc,CAAC;AACnC,CAAC,CAAC,gBAAM,CAAC;AAIT,MAAMC,kBAAkD,GACtDF,OAAO,IAoBPG,OAAO,CAACC,QAAQ,CAACC,QAAQ,CAAC,oCAAoC,CAAC,GAC3DL,OAAO,CAAC,yDAAyD,CAAC,CAACM,IAAI,CACpEC,CAAiD,IAChDA,CAAC,CAACC,OAAO,IAAIC,0BAAQ,EACvB,MAAMA,0BAAQ,CACf,GACDC,OAAO,CAACC,OAAO,CAACF,0BAAQ,CAAC;AAAC,SAEFE,OAAO;EAAA;AAAA;AAAA;EAAA,6BAAtB,WACbC,SAA+C,EAC/CC,MAA6C,EACV;IACnC,OAAO,OAAOX,kBAAkB,EAAEU,SAAS,EAAEC,MAAM,CAAC;EACtD,CAAC;EAAA;AAAA;AAAA"}
Some files were not shown because too many files have changed in this diff