Project import generated by Copybara.

GitOrigin-RevId: 2fdc0ad0c4bcc478430aecc627e0c78b03c3917f
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d4b5249..9ccf326 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,78 +1,50 @@
-# See go/cast-pre-commit for details
 default_language_version:
-    python: python3
+  python: python3
 repos:
--   hooks:
-    -   args: [--root, ../chromium/src, --target, chromecast/internal, --extra-repos, chromecast/internal]
-
-        id: check-deps
-        name: chromium-src-check-deps
-        stages:
-        - push
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/check-deps
-    rev: '1.3'
--   hooks:
-    -   args: []
-
-        id: clang-format
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/clang-format
-    rev: '3.4'
--   hooks:
-    -   args: []
-
-        id: cpp-blacklist
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/cpp-blacklist
-    rev: '2.4'
--   hooks:
-    -   args: []
-
-        id: cpplint
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/cpplint
-    rev: '1.4'
--   hooks:
-    -   id: eureka-commit-message-check
-        stages:
-        - commit-msg
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/eureka-commit-message-check
-    rev: '1.9'
--   hooks:
-    -   id: eureka-prepare-commit-message
-        stages:
-        - prepare-commit-msg
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/eureka-prepare-commit-message
-    rev: '1.5'
--   hooks:
-    -   id: json-format-check
-    -   id: xml-format-check
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/file-format-check
-    rev: '0.4'
--   hooks:
-    -   args: [--args, import("//chromecast/internal/build/args/config/precommit/all.gni"), ../chromium/src]
-
-        id: gn-check
-        name: gn-check-default
-        stages:
-        - push
-    -   args: [--args, import("//chromecast/internal/build/args/config/precommit/audio-only.gni"),
-    ../chromium/src]
-
-        id: gn-check
-        name: gn-check-audioonly
-        stages:
-        - push
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/gn-check
-    rev: '2.2'
--   hooks:
-    -   id: gn-format
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/gn-format
-    rev: '2.0'
--   hooks:
-    -   id: pylint
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/pylint
-    rev: '2.2'
--   hooks:
-    -   id: precommit-message-preservation
-        stages:
-        - prepare-commit-msg
-    repo: sso://nest-source-internal.git.corp.google.com/hooks/precommit-message-preservation
-    rev: '1.3'
+- hooks:
+  - args: []
+    id: cpplint
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/cpplint
+  rev: '1.6'
+- hooks:
+  - id: eureka-commit-message-check
+    stages:
+    - commit-msg
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/eureka-commit-message-check
+  rev: '2.0'
+- hooks:
+  - id: eureka-prepare-commit-message
+    stages:
+    - prepare-commit-msg
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/eureka-prepare-commit-message
+  rev: '2.0'
+- hooks:
+  - id: json-format-check
+  - id: xml-format-check
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/file-format-check
+  rev: '0.4'
+- hooks:
+  - args:
+    - --args
+    - import("//chromecast/internal/build/args/config/precommit/all.gni")
+    - ../chromium/src
+    id: gn-check
+    name: gn-check-default
+    stages:
+    - push
+  - args:
+    - --args
+    - import("//chromecast/internal/build/args/config/precommit/audio-only.gni")
+    - ../chromium/src
+    id: gn-check
+    name: gn-check-audioonly
+    stages:
+    - push
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/gn-check
+  rev: '2.2'
+- hooks:
+  - id: precommit-message-preservation
+    stages:
+    - prepare-commit-msg
+  repo: sso://nest-source-internal.git.corp.google.com/hooks/precommit-message-preservation
+  rev: '1.3'
diff --git a/configs/pre-commit/requirements.in b/configs/pre-commit/requirements.in
deleted file mode 100644
index 917f9d4..0000000
--- a/configs/pre-commit/requirements.in
+++ /dev/null
@@ -1 +0,0 @@
-pre-commit==2.12.0
diff --git a/configs/pre-commit/requirements.txt b/configs/pre-commit/requirements.txt
deleted file mode 100644
index 64167f2..0000000
--- a/configs/pre-commit/requirements.txt
+++ /dev/null
@@ -1,22 +0,0 @@
-appdirs==1.4.4
-    # via virtualenv
-cfgv==3.2.0
-    # via pre-commit
-distlib==0.3.1
-    # via virtualenv
-filelock==3.0.12
-    # via virtualenv
-identify==2.2.2
-    # via pre-commit
-nodeenv==1.5.0
-    # via pre-commit
-pre-commit==2.12.0
-    # via -r requirements.in
-pyyaml==5.4.1
-    # via pre-commit
-six==1.16.0
-    # via virtualenv
-toml==0.10.2
-    # via pre-commit
-virtualenv==20.4.3
-    # via pre-commit
diff --git a/cq/landmines/landmine.77 b/cq/landmines/landmine.77
new file mode 100644
index 0000000..f8f9392
--- /dev/null
+++ b/cq/landmines/landmine.77
@@ -0,0 +1,4 @@
+Landmine because of inconsistent Java versions of the current container image
+and the candidate image (provided by autoroller).
+
+http://b/353584129
diff --git a/cq/landmines/landmine.78 b/cq/landmines/landmine.78
new file mode 100644
index 0000000..2fcce47
--- /dev/null
+++ b/cq/landmines/landmine.78
@@ -0,0 +1,3 @@
+Landmine because of the bla4/2rs4/xua4 toolchain change
+
+http://b/358604827
diff --git a/cq/landmines/landmine.79 b/cq/landmines/landmine.79
new file mode 100644
index 0000000..cb16d38
--- /dev/null
+++ b/cq/landmines/landmine.79
@@ -0,0 +1,3 @@
+Landmine for build flag changes in CastLite speaker builds
+
+b/363167825
diff --git a/cq/landmines/landmine.80 b/cq/landmines/landmine.80
new file mode 100644
index 0000000..09c30bd
--- /dev/null
+++ b/cq/landmines/landmine.80
@@ -0,0 +1,3 @@
+Landmine for build flag changes in speaker builds
+
+b/351960189
diff --git a/cq/scripts/helpers/git_utils.py b/cq/scripts/helpers/git_utils.py
index 87c420b..0ad2d03 100644
--- a/cq/scripts/helpers/git_utils.py
+++ b/cq/scripts/helpers/git_utils.py
@@ -2,6 +2,7 @@
 
 from __future__ import absolute_import
 import collections
+import copy
 import os
 import re
 import six
@@ -93,20 +94,25 @@
   #   "some_other_project": 1
   # }
   project_depths = collections.Counter(
-      patch['project'] for patch in patches if patch['project'] in path_lookup)
-
+      (patch['project'], patch['branch']) for patch in patches
+      if (patch['project'], patch['branch']) in path_lookup)
   # |path_lookup| dictionary uses real paths relative to the root checkout
   # directory. However, some tools like gn expect paths to be symlinks, e.g.
   # chromium/src/chromecast/internal/receiver/app/dummy.cc instead of
   # cast/internal/receiver/app/dummy.cc. Create a dictionary here so that
   # symlinks can be returned for some projects.
-  file_prefix_dict = {
-      "chromecast/internal": "chromium/src/chromecast/internal"
-  }
-  file_prefix_dict = path_lookup.get_dict() | file_prefix_dict
+  file_prefix_dict = copy.deepcopy(path_lookup)
+  replacing_symlinks = {'chromecast/internal':
+                        'chromium/src/chromecast/internal'}
+  for project, symlink in replacing_symlinks.items():
+    if file_prefix_dict._lookup_dict.get(project):
+      update_dict = {(project, revision): symlink
+                     for revision in file_prefix_dict._lookup_dict[project]}
+      file_prefix_dict.update(update_dict)
 
-  # For each project, calculate the list of changed files, concatenate each file
-  # path with the project's path from checkout root, and append them to a list.
+  # For each project, calculate the list of changed files, concatenate each
+  # file path with the project's path from checkout root, and append them to
+  # a list.
   files = []
   for project in project_depths.keys():
     depth = project_depths[project]
@@ -144,7 +150,7 @@
   return filenames
 
 
-def commit_message(executor, directory=None):
+def commit_message(executor, directory=None, skip_counts: int=0) -> str:
   """Returns the most recent git commit message.
 
   Note that this does not include the Author: or Date: fields,
@@ -153,11 +159,12 @@
   Args:
     executor: Executes the subprocess.
     directory: Directory to execute the subprocess in (defaults to cwd)
+    skip_counts: Number of commits to skip before returning the message.
 
   Returns:
     The most recent git commit message.
   """
-  return executor.exec_subprocess(['git', 'log', '--format=%B', '-n', '1'],
+  return executor.exec_subprocess(['git', 'log', '--format=%B', '--skip', str(skip_counts), '-n', '1'],
                                   cwd=directory,
                                   check_output=True)
 
@@ -201,11 +208,7 @@
 
 
 def get_head_sha(executor, directory):
-  """Returns the SHA of the HEAD revision for the given directory,
-
-  or None if the directory specified is not a git repository
-  """
-
+  """Returns the SHA of the HEAD revision of the given directory, else None."""
   cmd_get_sha = ['git', 'rev-parse', 'HEAD']
   returncode, stdout, _ = executor.exec_subprocess(cmd_get_sha, cwd=directory)
   return stdout.strip() if returncode == 0 else None
diff --git a/cq/scripts/helpers/git_utils_test.py b/cq/scripts/helpers/git_utils_test.py
index 4e4a812..31550dc 100755
--- a/cq/scripts/helpers/git_utils_test.py
+++ b/cq/scripts/helpers/git_utils_test.py
@@ -95,16 +95,25 @@
   def testGetAllFiles(self):
     """Test getting all files affected by a group of patches."""
     patches = [
-        {'project': 'chromium/src', 'patchset_number': 12345},
-        {'project': 'chromium/src', 'patchset_number': 67890},
-        {'project': 'chromecast/internal', 'patchset_number': 34567},
-        {'project': 'test', 'patchset_number': 89012},
-        {'project': 'project_with_no_path', 'patchset_number': 1}]
+        {'project': 'chromium/src', 'patchset_number': 12345,
+         'branch': repo_utils.DEFAULT_REVISION},
+        {'project': 'chromium/src', 'patchset_number': 67890,
+         'branch': repo_utils.DEFAULT_REVISION},
+        {'project': 'chromium/src', 'patchset_number': 95271,
+         'branch': 'branch1'},
+        {'project': 'chromecast/internal', 'patchset_number': 34567,
+         'branch': 'branch2'},
+        {'project': 'test', 'patchset_number': 89012,
+         'branch': repo_utils.DEFAULT_REVISION},
+        {'project': 'project_with_no_path', 'patchset_number': 1,
+         'branch': repo_utils.DEFAULT_REVISION}]
 
-    path_lookup = repo_utils.ProjectLookupTable({
+    path_lookup = repo_utils._ProjectLookupTable()
+    path_lookup.update({
         'chromium/src': 'chromium/src',
-        'chromecast/internal': 'cast/internal',
-        'test': 'test'})
+        ('chromecast/internal', 'branch2'): 'cast/internal',
+        'test': 'test',
+    })
 
     def handle_git_revlist(cmd, cwd):
       if cmd == ['git', 'rev-list', '--count', 'HEAD']:
@@ -147,6 +156,49 @@
     self.assertFalse(git_utils.is_sha('master'))
     self.assertFalse(git_utils.is_sha('unfork_m52'))
 
+  def testGetAllFilesOverwritesWithSymlinks(self):
+    """Test that symlinked prefixes overwrite project paths in changed files."""
+    patches = [
+        {'project': 'chromecast/internal', 'patchset_number': 34567,
+         'branch': 'branch1'}]
+
+    path_lookup = repo_utils._ProjectLookupTable()
+    path_lookup.update({
+        ('chromecast/internal', 'branch1'): 'cast/internal',
+    })
+
+    def handle_git_revlist(cmd, unused_cwd):
+      if cmd == ['git', 'rev-list', '--count', 'HEAD']:
+        return (0, '200', '')
+      return None
+
+    def handle_git_diff(cmd, cwd):
+      if cmd[:2] != ['git', 'diff'] or '--name-only' not in cmd:
+        return None
+      if cwd == 'cast/internal':
+        self.assertTrue('HEAD~1' in cmd)
+        files = ['receiver/client.h', 'base/logging.h']
+      else:
+        return (0, '', '')
+      return (0, '\n'.join(files), '')
+
+    executor = MockExecutor()
+    executor.add_handler(handle_git_revlist)
+    executor.add_handler(handle_git_diff)
+    all_files = git_utils.get_changed_files_in_all_patches(
+        executor, patches, path_lookup)
+    self.assertEqual(
+      set(['chromium/src/chromecast/internal/receiver/client.h',
+           'chromium/src/chromecast/internal/base/logging.h',]),
+        set(all_files))
+
+  def testIsRevisionASha(self):
+    """Tests cases for git_utils.is_sha."""
+    self.assertTrue(
+        git_utils.is_sha('2fe8f3b110d8b117698265e18e432f32837247bb'))
+    self.assertFalse(git_utils.is_sha('master'))
+    self.assertFalse(git_utils.is_sha('unfork_m52'))
+
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/cq/scripts/helpers/repo_utils.py b/cq/scripts/helpers/repo_utils.py
index d10a2b0..0a687e7 100644
--- a/cq/scripts/helpers/repo_utils.py
+++ b/cq/scripts/helpers/repo_utils.py
@@ -1,17 +1,19 @@
 """Tools to interact with repo."""
 
 from __future__ import absolute_import
-from contextlib import contextmanager
+import contextlib
 import logging
 import os
 import shutil
 import tempfile
+from typing import Any, Mapping
 from xml.dom import minidom
 import xml.etree.ElementTree as et
 
 import six
 
 DEFAULT_REMOTE = 'eureka'
+DEFAULT_REVISION = 'DEFAULT'
 
 EUREKA_MANIFEST_PROJECT_NAME = 'eureka/manifest'
 LIBASSISTANT_MANIFEST_PROJECT_NAME = 'standalone/manifest'
@@ -98,28 +100,20 @@
   """Error if unable to parse output of 'repo manifest' command."""
 
 
-class ProjectLookupTable(object):
+class _ProjectLookupTable(object):
   """A lookup table for project and project paths."""
 
-  def __init__(self, lookup_dict=None, duplicate_keys=None):
-    """Create a ProjectLookupTable.
-
-    Args:
-      lookup_dict: Dictionary of key/value pairs.
-      duplicate_keys: A list of keys that were duplicates.
-    """
-    if not lookup_dict:
-      lookup_dict = {}
-    if not duplicate_keys:
-      duplicate_keys = []
-    self._lookup_dict = lookup_dict
-    self._duplicate_keys = duplicate_keys
+  def __init__(self):
+    """Create a ProjectLookupTable."""
+    self._lookup_dict = {}
+    self._duplicate_keys = set()
+    self._update = False
 
   def __getitem__(self, key):
     """Get the value for the given key.
 
     Args:
-      key: The key to get a value for.
+      key: The key to get a value for: project or (project, revision).
 
     Returns:
       The value for the given key.
@@ -131,20 +125,38 @@
     return self.get(key)
 
   def __setitem__(self, key, value):
-    """Sets the value for the given key."""
-    if self._lookup_dict.get(key):
-      self._duplicate_keys.append(key)
-    self._lookup_dict[key] = value
+    """Sets the value for the given keys.
+
+    Args:
+      key: (project, revision) or project.
+      value: The value.
+    """
+    if not isinstance(key, tuple):
+      project = key
+      revision = DEFAULT_REVISION
+    else:
+      project, revision = key
+    if self._lookup_dict.get(project, {}).get(revision) and not self._update:
+      self._duplicate_keys.add((project, revision))
+
+    if project not in self._lookup_dict:
+      self._lookup_dict[project] = {}
+    self._lookup_dict[project][revision] = value
 
   def __contains__(self, key):
     """Returns true if key is in the underlying dict."""
-    return key in self._lookup_dict
+    try:
+      return bool(self.get(key))
+    except DuplicateLookupError:
+      return True
+    except KeyError:
+      return False
 
   def get(self, key):
     """Get the value for the given key.
 
     Args:
-      key: The key to get a value for.
+      key: The key to get a value for: project or (project, revision).
 
     Returns:
       The value for the given key.
@@ -153,25 +165,45 @@
       KeyError: If the key is not included.
       DuplicateLookupError: If the key was included multiple times.
     """
-    if key in self._duplicate_keys:
+    revision_unspecified = not isinstance(key, tuple)
+    if revision_unspecified:
+      project = key
+      revision = DEFAULT_REVISION
+    else:
+      project, revision = key
+    if (project, revision) in self._duplicate_keys:
       raise DuplicateLookupError(
-          'Duplicate of {} found in manifest.'.format(key))
+          f'Duplicate of {str((project, revision))} found in manifest.'
+          ' If the same repository has been pulled more than once, '
+          'please explicitly specify the revision in the manifest.')
 
-    if key in self._lookup_dict:
-      return self._lookup_dict[key]
+    if project in self._lookup_dict:
+      revisions = self._lookup_dict[project]
 
-    if key.endswith('/') and key[:-1] in self._lookup_dict:
-      return self._lookup_dict[key[:-1]]
+    elif project.endswith('/') and project[:-1] in self._lookup_dict:
+      revisions = self._lookup_dict[project[:-1]]
 
     # For legacy reasons, some project names in the manifest include a '.git'
     # suffix. Gerrit strips off this suffix, though, so if we haven't found the
     # key yet, try searching with a .git suffix.
-    key_git = key + '.git'
-    if key_git in self._lookup_dict:
-      return self._lookup_dict[key_git]
+    elif project + '.git' in self._lookup_dict:
+      revisions = self._lookup_dict[project + '.git']
+    else:
+      raise KeyError('{} was not found in the project path lookup {}.'.format(
+          project, self._lookup_dict))
 
-    raise KeyError('{} was not found in the project path lookup {}.'.format(
-        key, self._lookup_dict))
+    assert revisions
+    if revision_unspecified:
+      if len(revisions) == 1:
+        return list(revisions.values())[0]
+      raise DuplicateLookupError(
+          f'There are multiple revisions of {project} being included in this '
+          f'manifest ({", ".join(revisions)}). Please query the table with '
+          f'tuple ({project}, revision)')
+    if revision in revisions:
+      return revisions[revision]
+    raise KeyError(f'{revision} was not found in the path lookup of {project}.'
+                   f' Available revisions: {", ".join(revisions)}')
 
   def get_dict(self):
     """Gets the dictionary representing the lookup table.
@@ -181,6 +213,14 @@
     """
     return self._lookup_dict
 
+  def update(self, mapping: Mapping[Any, str]):
+    try:
+      self._update = True
+      for k, v in mapping.items():
+        self[k] = v
+    finally:
+      self._update = False
+
 
 def get_project_path_lookup(executor, manifest_url, manifest=None, **kwargs):
   """Returns a project path lookup table.
@@ -206,7 +246,7 @@
   return parse_manifest_for_path(manifest, manifest_url)
 
 
-@contextmanager
+@contextlib.contextmanager
 def normalize_manifest_if_exists(manifest_path):
   """Normalizes the manifest for the duration of the context manager."""
   if manifest_path:
@@ -358,7 +398,7 @@
     A ProjectLookupTable representing the manifest for the given project.
   """
   assert isinstance(manifest, six.string_types)
-  result = ProjectLookupTable()
+  result = _ProjectLookupTable()
 
   # Add an entry to this map representing the repo manifest project.
   manifest_project_name = get_manifest_url_mapping(
@@ -373,9 +413,13 @@
   for project in root.findall('project'):
     name = project.get('name')
     path = project.get('path')
+    # TODO(b/358371464): This only covers the explicit specified revision.
+    # This is just a workaround. Should cover all the cases one day.
+    revision = project.get('upstream') or project.get('revision')
     if not path:
       path = name
-    result[name] = path
+    key = (name, revision) if revision else name
+    result[key] = path
   return result
 
 
@@ -390,7 +434,7 @@
     A ProjectLookupTable representing the manifest for the given project.
   """
   assert isinstance(manifest, six.string_types)
-  result = ProjectLookupTable()
+  result = _ProjectLookupTable()
 
   # Add an entry to this map representing the repo manifest project.
   manifest_project_name = get_manifest_url_mapping(
@@ -427,7 +471,7 @@
     A ProjectLookupTable representing the manifest for the given project.
   """
   assert isinstance(manifest, six.string_types)
-  result = ProjectLookupTable()
+  result = _ProjectLookupTable()
 
   manifest_project_name = get_manifest_url_mapping(
       REPO_MANIFEST_URL_TO_PROJECT_NAME, manifest_url)
@@ -451,7 +495,9 @@
     revision = project.get('revision')
     if not revision:
       revision = default_revision
-    result[name] = revision
+
+    key = (name, revision) if revision else name
+    result[key] = revision
   return result
 
 
diff --git a/cq/scripts/helpers/repo_utils_test.py b/cq/scripts/helpers/repo_utils_test.py
index 1eec759..18ff0f3 100644
--- a/cq/scripts/helpers/repo_utils_test.py
+++ b/cq/scripts/helpers/repo_utils_test.py
@@ -10,7 +10,7 @@
 sys.path.insert(
     0,
     os.path.realpath(os.path.join(os.path.dirname(__file__), os.pardir)))
-from helpers import repo_utils
+from helpers import repo_utils  # pylint: disable=g-import-not-at-top
 
 SAMPLE_MANIFEST_STRING = """
 <?xml version="1.0" encoding="UTF-8"?>
@@ -100,6 +100,29 @@
 TEST_MANIFEST_URL = 'https://foo.bar.googlesource.com/bat/baz'
 
 
+_SAMPLE_MANIFEST_STRING_WITH_MULTI_REVISIONS_OF_SAME_NAMES = """
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest>
+  <remote fetch="https://chromium.googlesource.com/" name="chromium" review="https://chromium-review.googlesource.com/"/>
+  <remote fetch="https://eureka-internal.googlesource.com/" name="eureka" review="sso://eureka-internal"/>
+
+  <project name="test" path="test1" remote="eureka" revision="branch1"/>
+  <project name="test" path="test2" remote="eureka" revision="branch2"/>
+</manifest>
+"""
+
+_SAMPLE_MANIFEST_WITH_UPSTREAMS_SET = """
+<?xml version="1.0" encoding="UTF-8"?>
+<manifest>
+  <remote fetch="https://chromium.googlesource.com/" name="chromium" review="https://chromium-review.googlesource.com/"/>
+  <remote fetch="https://eureka-internal.googlesource.com/" name="eureka" review="sso://eureka-internal"/>
+
+  <project name="test" path="test1" remote="eureka" revision="sha1" upstream="branch1"/>
+  <project name="test" path="test2" remote="eureka" revision="branch2"/>
+</manifest>
+"""
+
+
 class RepoUtilsTest(unittest.TestCase):
   """Tests for the repo utils module."""
 
@@ -111,8 +134,8 @@
     }
     project_lookup = repo_utils.parse_manifest_for_path(
         manifest, repo_utils.EUREKA_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
 
   def testParseManifest(self):
     """Tests parsing a manifest file for paths."""
@@ -130,8 +153,8 @@
     project_lookup = repo_utils.parse_manifest_for_path(
         SAMPLE_MANIFEST_STRING,
         repo_utils.EUREKA_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
 
   def testParseManifestForNonEurekaManifest(self):
     """Tests parsing a non-eureka manifest for path."""
@@ -146,14 +169,16 @@
         'prebuilt/toolchain': 'toolchain',
         'test': 'test'
     }
-    original_map = repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME
-    repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = {TEST_MANIFEST_URL:
-                                                    'bat/baz'}
-    project_lookup = repo_utils.parse_manifest_for_path(
-        SAMPLE_MANIFEST_STRING, TEST_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
-    repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = original_map
+    try:
+      original_map = dict(repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME)
+      repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = {TEST_MANIFEST_URL:
+                                                      'bat/baz'}
+      project_lookup = repo_utils.parse_manifest_for_path(
+          SAMPLE_MANIFEST_STRING, TEST_MANIFEST_URL)
+      for key, expected_value in expected_lookup.items():
+        self.assertEqual(project_lookup[key], expected_value)
+    finally:
+      repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = original_map
 
   def testParseManifestRemotes(self):
     """Tests parsing a manifest file for remotes."""
@@ -171,8 +196,8 @@
     project_lookup = repo_utils.parse_manifest_for_remote(
         SAMPLE_MANIFEST_STRING,
         repo_utils.EUREKA_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
 
   def testParseManifestRemotesForNonEurekaManifest(self):
     """Tests parsing a non-eureka manifest for remotes."""
@@ -187,14 +212,16 @@
         'prebuilt/toolchain': 'eureka',
         'test': 'eureka'
     }
-    original_map = repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME
-    repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = {TEST_MANIFEST_URL:
-                                                    'bat/baz'}
-    project_lookup = repo_utils.parse_manifest_for_remote(
-        SAMPLE_MANIFEST_STRING, TEST_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
-    repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = original_map
+    try:
+      original_map = dict(repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME)
+      repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = {TEST_MANIFEST_URL:
+                                                      'bat/baz'}
+      project_lookup = repo_utils.parse_manifest_for_remote(
+          SAMPLE_MANIFEST_STRING, TEST_MANIFEST_URL)
+      for key, expected_value in expected_lookup.items():
+        self.assertEqual(project_lookup[key], expected_value)
+    finally:
+      repo_utils.REPO_MANIFEST_URL_TO_PROJECT_NAME = original_map
 
   def testParseManifestRemotesForNonEurekaDefaultRemote(self):
     expected_lookup = {
@@ -202,8 +229,8 @@
     }
     project_lookup = repo_utils.parse_manifest_for_remote(
         SAMPLE_MANIFEST_WITH_DEFAULT_REMOTE, TEST_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
 
   def testParseManifestRevisions(self):
     """Tests parsing a manifest file for revisions."""
@@ -222,8 +249,8 @@
         SAMPLE_MANIFEST_STRING,
         repo_utils.EUREKA_MANIFEST_URL,
         'unfork_m51')
-    lookup = project_lookup._lookup_dict
-    self.assertEqual(expected_lookup, lookup)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
 
   def testParseManifestWithDups(self):
     """Tests parsing a manifest file with duplicates."""
@@ -235,23 +262,22 @@
         'eureka/manifest': '.repo/manifests',
         'fragglerock': 'fragglerock',
         'prebuilt/toolchain': 'toolchain',
-        'test': 'test2'
     }
-    expected_duplicates = ['test']
+    expected_duplicate = 'test'
     project_lookup = repo_utils.parse_manifest_for_path(
         SAMPLE_MANIFEST_STRING_WITH_DUPLICATE_NAMES,
         repo_utils.EUREKA_MANIFEST_URL)
-    lookup = project_lookup._lookup_dict
-    duplicates = project_lookup._duplicate_keys
-    self.assertEqual(expected_lookup, lookup)
-    self.assertEqual(expected_duplicates, duplicates)
+    for key, expected_value in expected_lookup.items():
+      self.assertEqual(project_lookup[key], expected_value)
+    with self.assertRaises(repo_utils.DuplicateLookupError):
+      project_lookup[expected_duplicate]  # pylint: disable=pointless-statement
 
   def testLookupDuplicateRepoRaisesError(self):
     """Tests a lookup on a duplicate repo raises an error."""
 
     class MockExecutor(object):
 
-      # pylint: disable=invalid-name,no-method-argument
+      # pylint: disable=invalid-name,no-method-argument, no-self-argument
       def exec_subprocess(*args, **kwargs):
         del kwargs
         return SAMPLE_MANIFEST_STRING_WITH_DUPLICATE_NAMES
@@ -275,7 +301,7 @@
 
     class MockExecutor(object):
 
-      # pylint: disable=invalid-name,no-method-argument
+      # pylint: disable=invalid-name,no-method-argument, no-self-argument
       def exec_subprocess(*args, **kwargs):
         del kwargs
         return SAMPLE_MANIFEST_INCOMPLETE_STRING
@@ -349,12 +375,26 @@
     with repo_utils.normalize_manifest_if_exists(temp_manifest):
       with open(temp_manifest, 'r') as f:
         manifest_content = f.read()
-        self.assertEqual(manifest_content, SAMPLE_MANIFEST_WITH_EXTERNAL_GITHUB)
+        self.assertEqual(manifest_content,
+                         SAMPLE_MANIFEST_WITH_EXTERNAL_GITHUB)
     with open(temp_manifest, 'r') as f:
       manifest_content = f.read()
       self.assertEqual(manifest_content, SAMPLE_MANIFEST_WITH_INTERNAL_GITHUB)
     os.unlink(temp_manifest)
 
+  def testParseManifestWithSameNamesFromDifferentRevisions(self):
+    project_lookup = repo_utils.parse_manifest_for_path(
+        _SAMPLE_MANIFEST_STRING_WITH_MULTI_REVISIONS_OF_SAME_NAMES,
+        repo_utils.EUREKA_MANIFEST_URL)
+    self.assertEqual(project_lookup[('test', 'branch1')], 'test1')
+    self.assertEqual(project_lookup[('test', 'branch2')], 'test2')
+
+  def testParseManifestUseUpstreamFirstThenRevision(self):
+    project_lookup = repo_utils.parse_manifest_for_path(
+        _SAMPLE_MANIFEST_WITH_UPSTREAMS_SET, repo_utils.EUREKA_MANIFEST_URL)
+    self.assertEqual(project_lookup[('test', 'branch1')], 'test1')
+    self.assertEqual(project_lookup[('test', 'branch2')], 'test2')
+
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/cq/scripts/slave/recipe/combined_sdk.py b/cq/scripts/slave/recipe/combined_sdk.py
index 7a8d896..363e5df 100644
--- a/cq/scripts/slave/recipe/combined_sdk.py
+++ b/cq/scripts/slave/recipe/combined_sdk.py
@@ -59,6 +59,25 @@
     ]
 }
 
+AMLOGIC_CAMERA_MAIN = {
+    'name': 'amlogic',
+    'build_script': 'sdk/amlogic/sdk/build_scripts/build_all.sh',
+    'fake_work_dir': FAKE_WORK_DIR,
+    'fake_prebuilt_dir': FAKE_PREBUILT_SUBDIR,
+    'prebuilt_file': AMLOGIC_PREBUILT_FILE,
+    'include_product_param': True,
+    'requirements_project': ('amlogic/sdk', 'camera-main'),
+    'requirements_path': 'requirements3.txt',
+    'oss_sdk_dir': OSS_SDK_DIR,
+    'oss_sdk_release_script': 'sdk/amlogic/sdk/build_scripts/release_oss.sh',
+    'build_logs_dir': BUILD_LOGS_DIR,
+    'purge_dirs': [
+        AMLOGIC_PREBUILT_FILE,
+        OSS_SDK_DIR,
+        FAKE_WORK_DIR,
+    ]
+}
+
 QUALCOMM_PREBUILT_FILE = 'qualcomm-prebuilt.tgz'
 
 QUALCOMM = {
@@ -113,6 +132,18 @@
         'vendor_config': AMLOGIC,
         'product': 'biggie',
     },
+    'combined-sdk-kernel-bootloader-amlogic-bla4': {
+        'vendor_config': AMLOGIC_CAMERA_MAIN,
+        'product': 'bla4',
+    },
+    'combined-sdk-kernel-bootloader-amlogic-2rs4': {
+        'vendor_config': AMLOGIC_CAMERA_MAIN,
+        'product': '2rs4',
+    },
+    'combined-sdk-kernel-bootloader-amlogic-xua4': {
+        'vendor_config': AMLOGIC_CAMERA_MAIN,
+        'product': 'xua4',
+    },
     'combined-sdk-kernel-bootloader-amlogic-boreal': {
         'vendor_config': AMLOGIC,
         'product': 'boreal',
diff --git a/cq/scripts/slave/recipe/fuchsia_builder.py b/cq/scripts/slave/recipe/fuchsia_builder.py
index d6b51f1..55c9242 100644
--- a/cq/scripts/slave/recipe/fuchsia_builder.py
+++ b/cq/scripts/slave/recipe/fuchsia_builder.py
@@ -148,8 +148,6 @@
     run_gtests = self._build_config.get('run_gtests')
     run_catatester = self._build_config.get('run_catatester', False)
     post_submit_upload = self.build_system == 'catabuilder'
-    self._fuchsia_sdk_root = os.path.join(chromium_root,
-                                          'third_party/fuchsia-sdk/')
     self._fuchsia_workdir = self.create_temp_directory()
 
     # Extend buildargs with incremental build id
@@ -204,14 +202,14 @@
       # Start FEMU
       femu_start_step = fuchsia_emulator_step.FuchsiaEmulatorStartStep(
           bucket=bucket,
-          fuchsia_sdk_root=self._fuchsia_sdk_root,
+          chromium_root=chromium_root,
+          out_dir=out_dir,
           fuchsia_workdir=self._fuchsia_workdir,
           **self._step_kwargs)
       steps.append(femu_start_step)
 
       step = fuchsia_unittest_step.FuchsiaUnitTestStep(
           out_dir=out_dir,
-          fuchsia_workdir=self._fuchsia_workdir,
           recipe_steps=steps,
           cwd=cwd,
           **self._step_kwargs)
@@ -281,8 +279,5 @@
       List of slave.base_step.BaseStep commands to be executed.
     """
     return [
-      fuchsia_emulator_step.FuchsiaEmulatorTeardownStep(
-            fuchsia_sdk_root=self._fuchsia_sdk_root,
-            fuchsia_workdir=self._fuchsia_workdir,
-            **self._step_kwargs)
+      fuchsia_emulator_step.FuchsiaEmulatorTeardownStep(**self._step_kwargs)
     ] + super().get_teardown_steps()
diff --git a/cq/scripts/slave/recipe/ota_builder.py b/cq/scripts/slave/recipe/ota_builder.py
index 0e52850..6f8be2e 100644
--- a/cq/scripts/slave/recipe/ota_builder.py
+++ b/cq/scripts/slave/recipe/ota_builder.py
@@ -378,9 +378,20 @@
         'resign': ota_resign_step.LUMIA_RESIGN,
         'partner_bucket': ['cast-partner-amlogic-internal'],
     },
+    'swift-demo-eng': {
+        'build_flavor': 'release',
+        'board_name': 'swift-p2',
+        'resign': ota_resign_step.SWIFT_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
+    'swift-demo-user': {
+        'build_flavor': 'release',
+        'resign': ota_resign_step.SWIFT_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
     'swift-eng': {
         'build_flavor': 'release',
-        'board_name': 'swift-p1',
+        'board_name': 'swift-p2',
         'resign': ota_resign_step.SWIFT_RESIGN,
         'partner_bucket': ['cast-partner-amlogic-internal'],
     },
@@ -389,6 +400,23 @@
         'resign': ota_resign_step.SWIFT_RESIGN,
         'partner_bucket': ['cast-partner-amlogic-internal'],
     },
+    'goertek-9eh4-partner-eng': {
+        'build_name': 'swift-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.GTK_9EH4_RESIGN,
+        'factory': ota_resign_step.GTK_9EH4_FACTORY,
+        'factory_extra_params': [
+            'CORE_DEV_BUILD=true',
+            'FCT_DEVICE_DIR=vendor/goertek/swift',
+            'FCT_FILES_PATH=/home/goertek/bin'
+        ],
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
     'gq-eng': {
         'build_flavor': 'release',
         'resign': ota_resign_step.GQ_RESIGN,
@@ -401,6 +429,21 @@
         'check_camera_targets': True,
         'eng_build': True,
     },
+    'gq-skip_gncheck-eng': {
+        'build_name': 'gq-eng',
+        'build_flavor': 'release',
+        'resign': ota_resign_step.GQ_RESIGN,
+        'factory': ota_resign_step.GQ_FACTORY,
+        'usbstick': ota_resign_step.GQ_USBSTICK,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'branch_extra_params': {
+            '1.49qfsi_retail': ['RETAIL_DEMO_BUILD=true'],
+        },
+        'check_camera_targets': True,
+        'eng_build': True,
+        'skip_gncheck': True,
+        'build_extra_params': ['ENABLE_GN_CHECK=1'],
+    },
     'gq-user': {
         'build_flavor': 'release',
         'resign': ota_resign_step.GQ_RESIGN,
@@ -512,6 +555,18 @@
         'check_camera_targets': True,
         'eng_build': True,
     },
+    'nq-skip_gncheck-eng': {
+        'build_name': 'nq-eng',
+        'build_flavor': 'release',
+        'resign': ota_resign_step.NQ_RESIGN,
+        'factory': ota_resign_step.NQ_FACTORY,
+        'usbstick': ota_resign_step.NQ_USBSTICK,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'check_camera_targets': True,
+        'eng_build': True,
+        'skip_gncheck': True,
+        'build_extra_params': ['ENABLE_GN_CHECK=1'],
+    },
     'nq-user': {
         'build_flavor': 'release',
         'resign': ota_resign_step.NQ_RESIGN,
@@ -612,6 +667,7 @@
             'cast-partner-marvell-internal',
             'cast-partner-synaptics-internal',
         ],
+        'make_clean': True,
     },
     'prince-iot_cast-user': {
         'build_flavor':
@@ -626,6 +682,7 @@
             'cast-partner-marvell-internal',
             'cast-partner-synaptics-internal',
         ],
+        'make_clean': True,
     },
     'prince-eng': {
         'build_flavor':
@@ -715,6 +772,19 @@
         'check_camera_targets': True,
         'eng_build': True,
     },
+    'spencer-skip_gncheck-eng': {
+        'build_name': 'spencer-eng',
+        'build_flavor': 'release',
+        'resign': ota_resign_step.SPENCER_RESIGN,
+        'factory': ota_resign_step.SPENCER_FACTORY,
+        'usbstick': ota_resign_step.SPENCER_USBSTICK,
+        'factory_tool': ota_resign_step.SPENCER_FACTORY_TOOL,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'check_camera_targets': True,
+        'eng_build': True,
+        'skip_gncheck': True,
+        'build_extra_params': ['ENABLE_GN_CHECK=1'],
+    },
     'spencer-user': {
         'build_flavor': 'release',
         'resign': ota_resign_step.SPENCER_RESIGN,
@@ -814,6 +884,7 @@
             'cast-partner-marvell-internal',
             'cast-partner-synaptics-internal',
         ],
+        'make_clean': True,
     },
     'valens-iot_cast-user': {
         'build_flavor':
@@ -830,6 +901,7 @@
             'cast-partner-marvell-internal',
             'cast-partner-synaptics-internal',
         ],
+        'make_clean': True,
     },
     'valens-eng': {
         'build_flavor':
@@ -922,6 +994,19 @@
         'check_camera_targets': True,
         'eng_build': True,
     },
+    'venus-skip_gncheck-eng': {
+        'build_name': 'venus-eng',
+        'build_flavor': 'release',
+        'resign': ota_resign_step.VENUS_RESIGN,
+        'factory': ota_resign_step.VENUS_FACTORY,
+        'factory_tool': ota_resign_step.VENUS_FACTORY_TOOL,
+        'usbstick': ota_resign_step.VENUS_USBSTICK,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'check_camera_targets': True,
+        'eng_build': True,
+        'skip_gncheck': True,
+        'build_extra_params': ['ENABLE_GN_CHECK=1'],
+    },
     'venus-user': {
         'build_flavor': 'release',
         'resign': ota_resign_step.VENUS_RESIGN,
@@ -952,19 +1037,156 @@
         # It's because the toolchain is different from other products.
         # Therefore, make clean to avoid using the same intermediate artifacts.
         'partner_bucket': ['cast-partner-amlogic-internal'],
-        'make_clean': True,
+        'skip_ota_tests': True,
     },
     'bla4-eng': {
         'build_flavor': 'release',
         'resign': ota_resign_step.BLA4_RESIGN,
         'partner_bucket': ['cast-partner-amlogic-internal'],
-        'make_clean': True,
     },
     'bla4-user': {
         'build_flavor': 'release',
         'resign': ota_resign_step.BLA4_RESIGN,
         'partner_bucket': ['cast-partner-amlogic-internal'],
-        'make_clean': True,
+    },
+    '2rs4-partner-eng': {
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'skip_ota_tests': True,
+    },
+    '2rs4-eng': {
+        'build_flavor': 'release',
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
+    '2rs4-user': {
+        'build_flavor': 'release',
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
+    'xua4-partner-eng': {
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+        'skip_ota_tests': True,
+    },
+    'xua4-eng': {
+        'build_flavor': 'release',
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
+    'xua4-user': {
+        'build_flavor': 'release',
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'partner_bucket': ['cast-partner-amlogic-internal'],
+    },
+    'amlogic-bla4-partner-eng': {
+        'build_name': 'bla4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.BLA4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'infineon-bla4-partner-eng': {
+        'build_name': 'bla4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.BLA4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'sercomm-bla4-partner-eng': {
+        'build_name': 'bla4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.BLA4_RESIGN,
+        'factory': ota_resign_step.BLA4_FACTORY,
+        'factory_extra_params': ['CORE_DEV_BUILD=true', 'FCT_DEVICE_DIR=vendor/sercomm/bla4', 'FCT_FILES_PATH=/home/sercomm/bin'],
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'amlogic-2rs4-partner-eng': {
+        'build_name': '2rs4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'infineon-2rs4-partner-eng': {
+        'build_name': '2rs4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'sercomm-2rs4-partner-eng': {
+        'build_name': '2rs4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step._2RS4_RESIGN,
+        'factory': ota_resign_step._2RS4_FACTORY,
+        'factory_extra_params': ['CORE_DEV_BUILD=true', 'FCT_DEVICE_DIR=vendor/sercomm/2rs4', 'FCT_FILES_PATH=/home/sercomm/bin'],
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'amlogic-xua4-partner-eng': {
+        'build_name': 'xua4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'infineon-xua4-partner-eng': {
+        'build_name': 'xua4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
+    },
+    'sercomm-xua4-partner-eng': {
+        'build_name': 'xua4-partner-eng',
+        'build_flavor': 'release',
+        'build_extra_params': ['PARTNER_BUILD=true', 'CORE_DEV_BUILD=true'],
+        # Disable build accelerator because
+        # JDM doesn't have the ability to enable RBE.
+        'enable_build_accelerator': False,
+        'resign': ota_resign_step.XUA4_RESIGN,
+        'factory': ota_resign_step.XUA4_FACTORY,
+        'factory_extra_params': ['CORE_DEV_BUILD=true', 'FCT_DEVICE_DIR=vendor/sercomm/xua4', 'FCT_FILES_PATH=/home/sercomm/bin'],
+        'skip_ota_tests': True,
+        'skip_gncheck': True,
     },
 }
 
@@ -1011,10 +1233,14 @@
   """Recipe to run an ota build."""
 
   def __init__(self, build_name, build_config, **kwargs):
-    base_recipe.BaseRecipe.__init__(
-        self, enable_build_accelerator=True, **kwargs)
     self._build_name = build_name
     self._build_config = build_config
+    enable_build_accelerator = True
+    if 'enable_build_accelerator' in self._build_config:
+        enable_build_accelerator = \
+            self._build_config['enable_build_accelerator']
+    base_recipe.BaseRecipe.__init__(
+        self, enable_build_accelerator=enable_build_accelerator, **kwargs)
     # Extracts default_properties from build config and adds it to the overall
     # properties.
     default_properties = build_config.get('default_properties', {})
@@ -1022,9 +1248,19 @@
         list(default_properties.items()) +
         list(self._step_kwargs.get('properties', {}).items()))
     self._skip_landmine = self.build_system == 'catabuilder'
-    self._skip_gncheck = self.build_system == 'catabuilder'
 
-    self._make_clean = self._build_config.get('make_clean')
+    if self.build_system == 'catabuilder':
+        self._skip_gncheck = True
+    elif 'skip_gncheck' in self._build_config:
+        self._skip_gncheck = self._build_config['skip_gncheck']
+    else:
+        self._skip_gncheck = False
+
+    if 'make_clean' in self._build_config:
+      self._make_clean = self._build_config['make_clean']
+
+    if 'build_name' in self._build_config:
+        self._build_name = self._build_config['build_name']
 
   def _oss_run_condition(self):
     if self.build_system != 'catabuilder':
@@ -1069,6 +1305,11 @@
         self._manifest_branch, []
     )
 
+  def _fct_extra_params(self):
+    return self._build_config.get(
+        'factory_extra_params', []
+    )
+
   def get_clean_steps(self):
     """Builds a list of all clean steps that should be executed by this recipe.
 
@@ -1078,17 +1319,17 @@
     return [make_clean_step.MakeCleanStep(**self._step_kwargs)]
 
   def _camera_target_check_condition(self):
-    return (BUILD_CONFIGS[self._build_name].get('check_camera_targets', False)
+    return (self._build_config.get('check_camera_targets', False)
             and branch_utils.is_branch_equal_to_or_later_than(
                 self._manifest_branch, CAMERA_TARGET_CHECK_MIN_BRANCH))
 
   def _builds_usonia_daemon(self):
-    return (BUILD_CONFIGS[self._build_name].get('build_usonia_daemon', False)
+    return (self._build_config.get('build_usonia_daemon', False)
             and branch_utils.is_branch_equal_to_or_later_than(
                 self._manifest_branch, USONIA_DAEMON_CHECK_MIN_BRANCH))
 
   def _builds_thread_telemetry_service_cros(self):
-    return (BUILD_CONFIGS[self._build_name].get(
+    return (self._build_config.get(
         'build_thread_telemetry_service_cros', False) and
             branch_utils.is_branch_equal_to_or_later_than(
                 self._manifest_branch, USONIA_DAEMON_CHECK_MIN_BRANCH))
@@ -1112,6 +1353,7 @@
             self._build_name,
             board_name=board_name,
             extra_params=self._ota_extra_params(),
+            fct_extra_params=self._fct_extra_params(),
             halt_on_failure=True,
             max_ota_size=max_ota_size,
             target_name=target_name,
@@ -1119,7 +1361,7 @@
 
     if self._camera_target_check_condition():
       product_name, variant = self._build_name.split('-')
-      is_eng_build = BUILD_CONFIGS[self._build_name].get('eng_build', False)
+      is_eng_build = self._build_config.get('eng_build', False)
       steps.append(
           ota_step.OtaCameraTargetsStep(product_name, variant, is_eng_build,
                                         **self._step_kwargs))
@@ -1153,6 +1395,7 @@
           ota_fct_step.OtaFctCompileStep(
               self._build_name,
               self._build_config.get('factory', {}),
+              fct_extra_params=self._fct_extra_params(),
               halt_on_failure=True,
               **self._step_kwargs))
       # Generation of image/board specific FCT images performed by the same
@@ -1179,6 +1422,7 @@
               self._build_name,
               self._build_config.get('usbstick', {}),
               name='build usbstick images',
+              fct_extra_params=self._fct_extra_params(),
               halt_on_failure=True,
               **self._step_kwargs))
       steps.extend(
diff --git a/cq/scripts/slave/recipe/precommit.py b/cq/scripts/slave/recipe/precommit.py
index c75b619..8b3c1e4 100644
--- a/cq/scripts/slave/recipe/precommit.py
+++ b/cq/scripts/slave/recipe/precommit.py
@@ -7,12 +7,13 @@
 
 This recipe looks for the config file and runs pre-commit.
 """
+import copy
 import logging
 import os
 
+from collections import Counter
 from slave import base_recipe
 from slave.step import precommit_step
-from slave.step import py_env_setup_step
 
 CONFIG_FILE_DEFAULT = ".pre-commit-config.yaml"
 CONFIG_FILE_BETA = ".pre-commit-config.beta.yaml"
@@ -42,16 +43,26 @@
     self._repo_root_directory = kwargs["eureka_root"]
     self._show_output = show_output
 
+    properties = copy.deepcopy(kwargs["properties"])
+    self._patch_project_counter = Counter()
+    if "patch_project" not in properties:
+      raise ValueError('patch_project is a required properties attribute.')
+    self._patch_project_counter[properties["patch_project"]] += 1
+    if "depends_on" not in properties:
+      properties["depends_on"] = []
+    for depend in properties["depends_on"]:
+      if "project" not in depend:
+        raise ValueError("project is a required attribute from objects in depends_on list.")
+      self._patch_project_counter[depend["project"]] += 1
+
   def get_steps(self):
-    return [
-      py_env_setup_step.PyEnvSetupStep(
-        py3=True,
-        requirements_project="continuous-tests",
-        requirements_path="configs/pre-commit/requirements.txt",
-        **self._step_kwargs),
-      precommit_step.PreCommitStep(
+    result = []
+    for patch_project in self._patch_project_counter:
+      result.append(precommit_step.PreCommitStep(
         config_file=self._config_file,
         repo_root_directory=self._repo_root_directory,
         show_output=self._show_output,
-        **self._step_kwargs),
-    ]
+        patch_project=patch_project,
+        commit_counts=self._patch_project_counter[patch_project],
+        **self._step_kwargs))
+    return result
diff --git a/cq/scripts/slave/recipe/x86_simulator_builder.py b/cq/scripts/slave/recipe/x86_simulator_builder.py
index 59e42c8..8ee6737 100644
--- a/cq/scripts/slave/recipe/x86_simulator_builder.py
+++ b/cq/scripts/slave/recipe/x86_simulator_builder.py
@@ -26,14 +26,14 @@
 
 
 def CreateRecipe(build_name: str, **kwargs):
-  del build_name
-  return X86SimulatorRecipe(**kwargs)
+  return X86SimulatorRecipe(build_name, **kwargs)
 
 
 class X86SimulatorRecipe(base_recipe.BaseRecipe):
   """ Recipe for build x86 64 simulator"""
 
-  def __init__(self, **kwargs):
+  def __init__(self, build_name, **kwargs):
+    self._build_name = build_name
     base_recipe.BaseRecipe.__init__(
         self, enable_build_accelerator=True, **kwargs)
     self._code_coverage = self._properties.get('code_coverage', False)
@@ -48,7 +48,8 @@
       'product': product
     })
 
-    self.out_dir = f'out_chromecast_{product}_x86_64/release'
+    _, variant = self._build_name.split('-')
+    self.out_dir = f'out_chromecast_{product}_x86_64_{variant}/release'
     self.chromium_out_dir = f'chromium/src/{self.out_dir}'
 
   def get_steps(self):
diff --git a/cq/scripts/slave/step/fuchsia_emulator_step.py b/cq/scripts/slave/step/fuchsia_emulator_step.py
index 623bcf7..48fbac9 100644
--- a/cq/scripts/slave/step/fuchsia_emulator_step.py
+++ b/cq/scripts/slave/step/fuchsia_emulator_step.py
@@ -4,234 +4,68 @@
 import os
 import shutil
 import socket
-
+import subprocess
+import time
 from unittest import mock
 
 from slave import base_step
 
 
-_STEP_DATA_FEMU_CAST = 'step_data_femu_cast'
-_STEP_DATA_FEMU_HTTP = 'step_data_femu_http'
-_STEP_DATA_FEMU_HTTPS = 'step_data_femu_https'
-_STEP_DATA_FEMU_SSH = 'step_data_femu_ssh'
-_STEP_DATA_FEMU_LOG = 'step_data_femu_log'
-_STEP_DATA_PACKAGE_SERVER_LOG = 'step_data_package_server_log'
-
-_FFX_LOG = 'ffx.log'
-_FFX_DAEMON_LOG = 'ffx.daemon.log'
-
-_SSH_CONFIG_TEMPLATE = '''
-# Generated by Eureka CQ FuchsiaEmulatorStep using ffx emu.
-# Ports are changed to an available TCP port assigned to QEMU.
-Host *
-  Port {port}
-  StrictHostKeyChecking no
-  IdentityFile {auth_path}
-  ControlPersist 1m
-  ControlMaster auto
-  ControlPath /tmp/ssh-%r@%h:%p
-  ServerAliveInterval 1
-  ServerAliveCountMax 3
-  ConnectTimeout 5
-'''
-
-class FuchsiaEmulatorError(RuntimeError):
-  """Errors relating to FEMU."""
-
-class BaseFuchsiaEmulatorStep(base_step.BaseStep):
-  """BaseFuchsiaEmulatorStep with common utilities."""
-  def __init__(self,
-               fuchsia_sdk_root,
-               fuchsia_workdir,
-               product='terminal.x64',
-               name='Unknown FEMU step',
-               **kwargs):
-    """Creates a BaseFuchsiaEmulatorStep instance.
-
-    Args:
-      fuchsia_sdk_root: Path to the Fuchsia SDK.
-      fuchsia_workdir: Path to directory with Fuchsia artifacts.
-      product: Product image to run the emulator on.
-      name: Name of the step to pass to BaseStep.
-      kwargs: Remaining args to pass to BaseStep.
-    """
-    base_step.BaseStep.__init__(self, name=name, **kwargs)
-    self._fuchsia_sdk_root = fuchsia_sdk_root
-    prod, board = product.split('.', 1)
-    self._product_bundle = os.path.join(fuchsia_sdk_root, 'images', prod, board)
-    # GN SDK scripts
-    self._ffx = os.path.join(self._fuchsia_sdk_root, 'sdk/tools/x64/ffx')
-
-    # Common Paths
-    self._fuchsia_workdir_path = fuchsia_workdir
-
-  def _dump_file_to_log(self, file_path):
-    """Dump file to log.
-
-    Args:
-        file_path: Location of the file to add to the logger output.
-    """
-    logging.info('%s log:', file_path)
-    with open(file_path) as file_fd:
-      logging.info(file_fd.read())
+_STEP_DATA_ENV_PID = 'step_data_env_pid'
 
 
-class FuchsiaEmulatorStartStep(BaseFuchsiaEmulatorStep):
-  """Step for starting local Fuchsia assets on FEMU."""
+class FuchsiaEmulatorStartStep(base_step.BaseStep):
+  """Step for starting local Fuchsia assets."""
 
-  def __init__(self, **kwargs):
+  def __init__(self, chromium_root, out_dir, fuchsia_workdir, **kwargs):
     """Creates a FuchsiaEmulatorCreateStep instance.
 
     Args:
       kwargs: Passed to FuchsiaEmulatorBaseStep.
     """
-    BaseFuchsiaEmulatorStep.__init__(self, name="Start FEMU", **kwargs)
-    self.femu_log = None
-    self.package_server_log = None
-    self._ports_in_use = []
+    base_step.BaseStep.__init__(self, name='Start fuchsia test env', **kwargs)
+    self._chromium_root = chromium_root
+    self._out_dir = out_dir
+    self._fuchsia_workdir = fuchsia_workdir
 
-  def _get_available_tcp_port(self, max_attempts=100):
-    """Find a unique and open port.
-
-    Verify port is available by opening a listening socket on it.
-
-    Verify uniqueness by storing a list of ports returned, and ensuring no
-    duplicates in this list.
-    """
-    # Search for a port that is open and verify it isn't
-    # already being used for another FEMU port.
-    attempts = 0
-    while attempts < max_attempts:
-      attempts += 1
-      # Look for an open port
-      sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-      sock.bind(("", 0))
-      port = sock.getsockname()[1]
-      sock.close()
-      # Verify port is not being used for another port
-      if port not in self._ports_in_use:
-        break
-    if attempts >= max_attempts:
-      raise FuchsiaEmulatorError('Failed to find open tcp port')
-    self._ports_in_use.append(port)
-    return port
-
-  def write_file(self, file, content):
-    with open(file, 'w') as f:
-      f.write(content)
-
-  def create_ssh_config(self):
-    """Create SSH config for the test runner to connect to FEMU.
-
-    Assumes that FEMU has already been started.
-    """
-    home_dir = os.path.expanduser('~')
-    ssh_dir = os.path.join(home_dir, '.ssh')
-    returncode, _, _ = self.exec_subprocess(['mkdir', '-p', ssh_dir])
-    if returncode != 0:
-      self._dump_file_to_log(self.femu_log)
-      self._dump_file_to_log(self.package_server_log)
-      raise FuchsiaEmulatorError('Failed to create ssh out dir')
-    ssh_config_path = os.path.join(home_dir, '.fuchsia', 'sshconfig')
-    ssh_config_content = _SSH_CONFIG_TEMPLATE.format(
-      port=self._ssh_port,
-      auth_path=os.path.join(ssh_dir, 'fuchsia_ed25519'))
-    self.write_file(ssh_config_path, ssh_config_content)
-    logging.info('ssh_config for chromium test at %s', ssh_config_path)
-
-  def start_femu(self):
-    """Start Fuchsia Emulator process."""
-    self._ssh_port = self._get_available_tcp_port()
-
-    _, self.femu_log = self.get_new_tmp_file()
-    logging.info('femu log at %s', self.femu_log)
-    self.add_step_data(_STEP_DATA_FEMU_LOG, self.femu_log)
-
-    _, self.package_server_log = self.get_new_tmp_file()
-    logging.info('package server log at %s', self.package_server_log)
-    self.add_step_data(_STEP_DATA_PACKAGE_SERVER_LOG, self.package_server_log)
-
-    # HACK(b/290049007)
-    # For currently unknown reasons, there is an ffx daemon socket already
-    # present, but occassionally unresponsive. The emu start command will
-    # fail if it attempts to use this bad daemon. As a stop-gap measure
-    # to get Eureka CQ/CB back to green, we will force restart the daemon.
-    ffx_daemon_stop = [
-        self._ffx,
-        '-c', 'log.level=debug',
-        '-c', 'log.dir={}'.format(self._fuchsia_workdir_path),
-        'daemon',
-        'stop',
-        '-t', '5000',
-    ]
-    self.exec_subprocess(ffx_daemon_stop, check_output=True)
-
-    # Start FEMU
-    femu_command = [
-        self._ffx,
-        '--timeout', '100',
-        '-c', 'log.level=debug',
-        '-c', 'log.dir={}'.format(self._fuchsia_workdir_path),
-        'emu',
-        'start',
-        '--startup-timeout', '100',
-        self._product_bundle,
-        '--log', self.femu_log,
-        '--port-map', f'ssh:{self._ssh_port}',
-        '--headless',  # Run without gui on infra
-    ]
-    self.exec_subprocess(femu_command, check_output=True)
 
   def run(self):
-    """Starts a local FEMU instance."""
-    self.start_femu()
-    # Copy SSH config needed for chromium test runners.
-    self.create_ssh_config()
-    # Output logs to give additional debugging information.
-    self._dump_file_to_log(self.femu_log)
-    # Store femu ports in step data for test steps.
-    self.add_step_data(_STEP_DATA_FEMU_SSH, self._ssh_port)
+    daemon_dir = os.path.join(self._fuchsia_workdir, 'daemon')
+    os.makedirs(daemon_dir, exist_ok=True)
+    self.add_step_data('step_data_daemon_dir', daemon_dir)
+    proc = self.start_subprocess(
+            None,
+            subprocess.PIPE,
+            subprocess.PIPE, [
+              os.path.join(self._chromium_root,
+                           'build/fuchsia/test/test_env_setup.py'),
+              '--logs-dir',
+              self.get_gcs_dir(),
+            ],
+            env={**os.environ,
+                 'FFX_ISOLATE_DIR': daemon_dir,
+                 'CHROME_HEADLESS': '1'})[0]
+    pid_file = os.path.join(self.get_gcs_dir(),
+                            'test_env_setup.' + str(proc.pid) + '.pid')
+    while not os.path.isfile(pid_file):
+        proc.poll()
+        if proc.returncode:
+            return False
+        time.sleep(1)
+    self.add_step_data(_STEP_DATA_ENV_PID, proc.pid)
     return True
 
 
-class FuchsiaEmulatorTeardownStep(BaseFuchsiaEmulatorStep):
+class FuchsiaEmulatorTeardownStep(base_step.BaseStep):
   """Step for cleaning up after the FuchsiaEmulatorStartStep."""
 
   def __init__(self, **kwargs):
     """Creates a FuchsiaEmulatorTeardownStep instance."""
-    BaseFuchsiaEmulatorStep.__init__(self, name="Teardown FEMU", **kwargs)
-
-  def _copy_logs_to_gcs(self):
-    """Copies FuchsiaEmulator related logs to the GCS directory"""
-    log_source_files_to_destinations = {
-        self.get_step_data(_STEP_DATA_FEMU_LOG): 'femu_system_log.txt',
-        self.get_step_data(
-            _STEP_DATA_PACKAGE_SERVER_LOG
-        ): 'package_server_log.txt',
-        os.path.join(self._fuchsia_workdir_path, _FFX_LOG): 'ffx.log',
-        os.path.join(
-            self._fuchsia_workdir_path, _FFX_DAEMON_LOG
-        ): 'ffx.daemon.log',
-    }
-
-    for src, dst in log_source_files_to_destinations.items():
-      if not os.path.isfile(src):
-        logging.warning('The expected log file "%s" does not exist!', src)
-        continue
-
-      gcs_upload_dst = os.path.join(self.get_gcs_dir(), dst)
-      shutil.copyfile(src, gcs_upload_dst)
+    base_step.BaseStep.__init__(self,
+                                name='Teardown fuchsia test env',
+                                **kwargs)
 
   def run(self):
     """Cleanup after FuchsiaEmulatorTeardownStep."""
-    # Kill FEMU
-    self._copy_logs_to_gcs()
-    femu_command = [self._ffx,
-                    'emu',
-                    'stop',
-                    '--all',
-                    ]
-    # Don't check the return code. If emulator crashed, ffx emu will
-    # throw an error but it doesn't mean the kill action failed.
-    self.exec_subprocess(femu_command)
+    self.exec_subprocess(['kill', str(self.get_step_data(_STEP_DATA_ENV_PID))])
     return True
diff --git a/cq/scripts/slave/step/fuchsia_unittest_step.py b/cq/scripts/slave/step/fuchsia_unittest_step.py
index ba538ad..d5d422f 100644
--- a/cq/scripts/slave/step/fuchsia_unittest_step.py
+++ b/cq/scripts/slave/step/fuchsia_unittest_step.py
@@ -15,11 +15,9 @@
 class FuchsiaUnitTestStep(base_step.BaseStep):
   """Build step class to run runner scripts as unit tests."""
 
-  def __init__(self, out_dir, fuchsia_workdir, recipe_steps,
+  def __init__(self, out_dir, recipe_steps,
                name='fuchsia_unit_test', **kwargs):
     base_step.BaseStep.__init__(self, name=name, **kwargs)
-    self._changed_files = None
-    self._fuchsia_workdir = fuchsia_workdir
     self._out_dir = out_dir
     self._step_kwargs = kwargs
     self._recipe_steps = recipe_steps
@@ -70,6 +68,9 @@
         halt_on_failure=fail_fast,
         name=step_name,
         timeout_secs=max_timeout_secs,
+        env={**os.environ,
+             'FFX_ISOLATE_DIR': self.get_step_data('step_data_daemon_dir'),
+             'CHROME_HEADLESS': '1'},
         **self._step_kwargs)
     # This step should be added after the current fuchsia unittest step.
     # However, it is created during the recipe run, which is non-trivial.
@@ -102,15 +103,15 @@
       suite_name = suite_cmd_parts[0]
 
       runner_script = 'run_{}'.format(suite_name)
-      fuchsia_out_dir = os.path.join(self._fuchsia_workdir, 'packages')
       logs_dir = os.path.join(self.get_gcs_dir(), suite_name)
       os.makedirs(logs_dir, exist_ok=True)
 
+      # TODO(b/352409265): Avoid restarting the repo; the run_* wrapper always
+      # creates a new repo and registers it with the device / emulator, which
+      # wastes several seconds per package.
       runner_cmd = [
           os.path.join(runner_folder, runner_script),
-          '--logs-dir', logs_dir,
-          '--device', 'device',
-          '--fuchsia-out-dir', fuchsia_out_dir
+          '--logs-dir', logs_dir, '--device',
       ] + suite_cmd_parts[1:]  # Append gtest filters to the command
 
       self.add_unittest_step(suite_name, runner_cmd, fail_fast=False)
diff --git a/cq/scripts/slave/step/landmine_step.py b/cq/scripts/slave/step/landmine_step.py
index 3022f4d..f7811a9 100644
--- a/cq/scripts/slave/step/landmine_step.py
+++ b/cq/scripts/slave/step/landmine_step.py
@@ -65,7 +65,12 @@
     the landmine CL and the landmine actually landing, the landmine can run
     again.
     """
-    all_patches = [{'project': self.patch_project}] + self.depends_on_list
+    if self.patch_project:
+      all_patches = [{
+        'project': self.patch_project,
+        'branch': self.get_property('patch_branch')}] + self.depends_on_list
+    else:
+      all_patches = self.depends_on_list
     changed_files = git_utils.get_changed_files_in_all_patches(
         self, all_patches, self.get_project_path_lookup_table())
     return all(landmine not in f for f in changed_files)
diff --git a/cq/scripts/slave/step/ota_fct_step.py b/cq/scripts/slave/step/ota_fct_step.py
index ee8b33c..89ca73a 100644
--- a/cq/scripts/slave/step/ota_fct_step.py
+++ b/cq/scripts/slave/step/ota_fct_step.py
@@ -40,7 +40,9 @@
     command = ['make']
     command += ['{}image'.format(t) for t in factory_image_types]
     command += ['BUILD_NUMBER={}'.format(self.build_number)]
+    command += self.build_accelerator.make_flags
     command += self.make_target_param(True)
+    command += self._make_fct_extra_flags()
 
     returncode, stdout, stderr = self.exec_subprocess(command)
     if returncode != 0:
diff --git a/cq/scripts/slave/step/ota_resign_step.py b/cq/scripts/slave/step/ota_resign_step.py
index 216f8da..897a005 100644
--- a/cq/scripts/slave/step/ota_resign_step.py
+++ b/cq/scripts/slave/step/ota_resign_step.py
@@ -131,6 +131,16 @@
     '-v',
 ]
 
+_2RS4_RESIGN_COMMAND = [
+    './vendor/amlogic/2rs4/build/tools/releasetools/ota_from_target_files',
+    '-v',
+]
+
+XUA4_RESIGN_COMMAND = [
+    './vendor/amlogic/xua4/build/tools/releasetools/ota_from_target_files',
+    '-v',
+]
+
 ASSISTANTDEFAULT_RESIGN = {
     'command': ['cp'],
     'source_binary_path': '{dist_dir}/{product}-ota-{buildset}.zip',
@@ -441,10 +451,30 @@
 SWIFT_RESIGN = {
     'command': SWIFT_RESIGN_COMMAND,
     'variants': [
-        {'board': 'swift-p0'},
-        {'board': 'swift-p0', 'omaha_channel': 'beta-channel'},
         {'board': 'swift-p1'},
         {'board': 'swift-p1', 'omaha_channel': 'beta-channel'},
+        {'board': 'swift-p2'},
+        {'board': 'swift-p2', 'omaha_channel': 'beta-channel'},
+    ],
+}
+
+GTK_9EH4_RESIGN = {
+    'command': SWIFT_RESIGN_COMMAND,
+    'variants': [
+        {'board': 'swift-p1'},
+        {'board': 'swift-p1', 'omaha_channel': 'beta-channel'},
+        {'board': 'swift-p2'},
+        {'board': 'swift-p2', 'omaha_channel': 'beta-channel'},
+    ],
+}
+
+GTK_9EH4_FACTORY = {
+    'command': SWIFT_RESIGN_COMMAND,
+    'factory_image_types': ['fct'],
+    'source_binary_path': SOURCE_FACTORY_PATH,
+    'variants': [
+        {'board': 'swift-p1', 'usb_host_package': None},
+        {'board': 'swift-p2', 'usb_host_package': None},
     ],
 }
 
@@ -1099,6 +1129,59 @@
     'command': BLA4_RESIGN_COMMAND,
     'variants': [
         {'board': 'bla4-p0'},
+        {'board': 'bla4-p0', 'omaha_channel': 'beta-channel'},
+        {'board': 'bla4-p1'},
+        {'board': 'bla4-p1', 'omaha_channel': 'beta-channel'},
+        {'board': 'bla4-p2'},
+        {'board': 'bla4-p2', 'omaha_channel': 'beta-channel'},
+    ],
+}
+
+BLA4_FACTORY = {
+    'command': BLA4_RESIGN_COMMAND,
+    'factory_image_types': ['fct'],
+    'source_binary_path': SOURCE_FACTORY_PATH,
+    'variants': [
+        {'board': 'bla4-p0', 'usb_host_package': None},
+        {'board': 'bla4-p1', 'usb_host_package': None},
+        {'board': 'bla4-p2', 'usb_host_package': None},
+    ],
+}
+
+_2RS4_RESIGN = {
+    'command': _2RS4_RESIGN_COMMAND,
+    'variants': [
+        {'board': '2rs4-p1'},
+        {'board': '2rs4-p1', 'omaha_channel': 'beta-channel'},
+        {'board': '2rs4-p2'},
+        {'board': '2rs4-p2', 'omaha_channel': 'beta-channel'},
+    ],
+}
+
+_2RS4_FACTORY = {
+    'command': _2RS4_RESIGN_COMMAND,
+    'factory_image_types': ['fct'],
+    'source_binary_path': SOURCE_FACTORY_PATH,
+    'variants': [
+        {'board': '2rs4-p1', 'usb_host_package': None},
+        {'board': '2rs4-p2', 'usb_host_package': None},
+    ],
+}
+
+XUA4_RESIGN = {
+    'command': XUA4_RESIGN_COMMAND,
+    'variants': [
+        {'board': 'xua4-p1'},
+        {'board': 'xua4-p1', 'omaha_channel': 'beta-channel'},
+    ],
+}
+
+XUA4_FACTORY = {
+    'command': XUA4_RESIGN_COMMAND,
+    'factory_image_types': ['fct'],
+    'source_binary_path': SOURCE_FACTORY_PATH,
+    'variants': [
+        {'board': 'xua4-p1', 'usb_host_package': None},
     ],
 }
 
diff --git a/cq/scripts/slave/step/ota_step.py b/cq/scripts/slave/step/ota_step.py
index 1ce772c..6d57c90 100644
--- a/cq/scripts/slave/step/ota_step.py
+++ b/cq/scripts/slave/step/ota_step.py
@@ -44,7 +44,7 @@
   """Build step class for building OTA's."""
 
   def __init__(self, build_name, name='build ota', board_name=None,
-               extra_params=None, max_ota_size=None, target_name=None,
+               extra_params=None, fct_extra_params=None, max_ota_size=None, target_name=None,
                timeout_secs=_OTA_STEP_MAX_TIME_SECONDS,
                **kwargs):
     """Creates a OtaStep instance.
@@ -54,6 +54,7 @@
       name: user-visible name of this step.
       board_name: optional BOARD_NAME param for make if any.
       extra_params: optional list of params to add
+      fct_extra_params: optional list of extra make params used when building fct
       max_ota_size: Max size of OTA to enfoce if any enforcement required.
       timeout_secs: Max execution time for OTA steps.
       **kwargs: Any additional args to pass to BaseStep.
@@ -63,6 +64,7 @@
     self._build_name = build_name
     self._board_name = board_name
     self._extra_params = extra_params
+    self._fct_extra_params = fct_extra_params
     self._max_ota_size = max_ota_size
     self._target_name = target_name
 
@@ -180,7 +182,7 @@
     if self._is_ml_framework_build:
       flags += ['ENABLE_ML_FRAMEWORK=true']
     if self._is_demo_build:
-      flags += ['DEMO_OTA=true']
+      flags += ['RETAIL_DEMO_BUILD=true']
     if self._board_name:
       flags += ['BOARD_NAME={}'.format(self._board_name)]
     if self._is_1led_build:
@@ -193,6 +195,12 @@
       flags += self._extra_params
     return flags
 
+  def _make_fct_extra_flags(self):
+    flags = []
+    if self._fct_extra_params:
+      flags += self._fct_extra_params
+    return flags
+
   def _build_ota(self, custom_shell):
     """Builds |build_name| OTA package for the |issue| and |patchset|.
 
diff --git a/cq/scripts/slave/step/precommit_step.py b/cq/scripts/slave/step/precommit_step.py
index 819643e..5195a14 100644
--- a/cq/scripts/slave/step/precommit_step.py
+++ b/cq/scripts/slave/step/precommit_step.py
@@ -2,11 +2,9 @@
 
 See documentation on the precommit recipe - this module
 contains the single step in that recipe."""
-from __future__ import absolute_import
 import logging
 import os
 import pprint
-import tempfile
 
 from helpers import git_utils
 from helpers import os_utils
@@ -16,20 +14,24 @@
 class PreCommitStep(base_step.BaseStep):
   """Step for running pre-commit hook checks."""
 
-  def __init__(self, config_file, repo_root_directory, show_output, **kwargs):
+  def __init__(self, config_file, repo_root_directory, show_output, patch_project: str, commit_counts: int, **kwargs):
     """
     Args:
       config_file: The pre-commit config file to use.
       repo_root_directory: The root directory to the repo checkout.
       show_output: True to make the output a comment on the CL.
+      patch_project: The project to check the precommit.
+      commit_counts: Number of commits made to the current project.
       **kwargs: Additional args passed to BaseStep.
     """
-    base_step.BaseStep.__init__(self, name="precommit", **kwargs)
+    base_step.BaseStep.__init__(self, name=f"{patch_project}_precommit", **kwargs)
     self._config_file = config_file
     self._repo_root_directory = repo_root_directory
     self._show_output = show_output
+    self._patch_project = patch_project
+    self._commit_counts = commit_counts
 
-  def run(self):
+  def run(self) -> bool:
     """Run the step.
 
     Returns:
@@ -38,9 +40,9 @@
         2. There was no pre-commit config.
     """
     try:
-        directory = self.directory
+        directory = self.get_project_path(self._patch_project)
     except KeyError:
-        logging.info("Unable to get self.directory. Skipping pre-commit.")
+        logging.info("Unable to get directory of project %s. Skipping pre-commit.", self._patch_project)
         return True
 
     pre_commit_config = os.path.join(directory, self._config_file)
@@ -49,10 +51,10 @@
           pre_commit_config, os.path.abspath(pre_commit_config))
       return True
 
-    results = [
-      self._run_commit_message_hooks(),
-      self._run_pre_commit_hooks(),
-      self._run_push_hooks(),
+    results = [self._run_commit_message_hooks(directory, i) for i in range(self._commit_counts)]
+    results += [
+      self._run_pre_commit_hooks(directory),
+      self._run_push_hooks(directory),
     ]
     returncode = max(returncode for returncode, _, _ in results)
     stdout = "\n".join(stdout for _, stdout, _ in results)
@@ -99,18 +101,21 @@
 
   def _precommit_binary(self) -> str:
     """Get the path to pre-commit in our recipe's virtualenv."""
-    return os.path.join(self.get_step_data("virtual_env_root"),
-      "bin", "pre-commit")
+    return "/workspace/recipe_python_virtual_env/bin/pre-commit"
 
-  def _run_commit_message_hooks(self):
+  def _run_commit_message_hooks(self, directory: str, skips: int) -> tuple[int, str, str]:
     """Run all the hooks for commit message checks.
 
+    Args:
+      directory: The directory to execute the command.
+      skips: Number of git commit messages to skip.
+
     Returns:
       A tuple of returncode, stdout, stderr from pre-commit
     """
     env = self._get_env()
-    with os_utils.change_cwd(self.directory):
-      commit_message = git_utils.commit_message(self)
+    with os_utils.change_cwd(directory):
+      commit_message = git_utils.commit_message(self, skip_counts=skips)
       commit_message_path = self.write_temp_file(commit_message)
       logging.debug("Using commit message tempfile %s", commit_message_path)
       cmd = [
@@ -122,7 +127,7 @@
       ]
       return self.exec_subprocess(cmd, env=env)
 
-  def _run_pre_commit_hooks(self):
+  def _run_pre_commit_hooks(self, directory: str) -> tuple[int, str, str]:
     """Run all the hooks of the 'pre-commit' type.
 
     This is an unfortunate naming problem - 'pre-commit' the framework
@@ -130,11 +135,14 @@
     to run before committing. This function runs just those hooks, as
     opposted to pre-push, commit-message, or other hook types.
 
+    Args:
+      directory: The directory to execute the command.
+
     Returns:
       A tuple of returncode, stdout, stderr from pre-commit
     """
     env = self._get_env()
-    with os_utils.change_cwd(self.directory):
+    with os_utils.change_cwd(directory):
       cmd = [
         self._precommit_binary(),
         "run",
@@ -144,14 +152,17 @@
       ]
       return self.exec_subprocess(cmd, env=env)
 
-  def _run_push_hooks(self):
+  def _run_push_hooks(self, directory: str) -> tuple[int, str, str]:
     """Run all the hooks of the 'push' stage.
 
+    Args:
+      directory: The directory to execute the command.
+
     Returns:
       A tuple of returncode, stdout, stderr from pre-commit
     """
     env = self._get_env()
-    with os_utils.change_cwd(self.directory):
+    with os_utils.change_cwd(directory):
       cmd = [
         self._precommit_binary(),
         "run",
diff --git a/cq/scripts/slave/step/unittest_step.py b/cq/scripts/slave/step/unittest_step.py
index 6e13dfa..fbde26a 100644
--- a/cq/scripts/slave/step/unittest_step.py
+++ b/cq/scripts/slave/step/unittest_step.py
@@ -199,7 +199,12 @@
 
   def _get_changed_files(self):
     if self._changed_files is None:
-      all_patches = [{'project': self.patch_project}] + self.depends_on_list
+      if self.patch_project:
+        all_patches = [{
+          'project': self.patch_project,
+          'branch': self.get_property('patch_branch')}] + self.depends_on_list
+      else:
+        all_patches = self.depends_on_list
       self._changed_files = git_utils.get_changed_files_in_all_patches(
           self, all_patches, self.get_project_path_lookup_table())
     return self._changed_files
diff --git a/cq/scripts/slave/step/x86_camera_step.py b/cq/scripts/slave/step/x86_camera_step.py
index f1ab6a0..f6490ab 100644
--- a/cq/scripts/slave/step/x86_camera_step.py
+++ b/cq/scripts/slave/step/x86_camera_step.py
@@ -66,6 +66,7 @@
 ]
 SIM_CONFIG = "virtual-home/cirque-park/build_eureka/{}/"
 MOJO_EVENTS = "virtual-home/cirque-park/build_eureka/mojo_events/"
+OPENWEAVE = "virtual-home/cirque-park/openweave/"
 SAMPLE_VIDEO = "vendor/nest/reconnect/data/test/proxy_video.h264"
 SAMPLE_AUDIO = "vendor/nest/reconnect/data/test/proxy_audio_mono.opus"
 SAMPLE_JPEG = "vendor/nest/reconnect/data/test/{}_sample.jpeg"
@@ -182,11 +183,12 @@
       os.path.join(self.get_gcs_dir(), tar_filename),
       SIM_CONFIG.format("{}_x86".format(self.product)),
       MOJO_EVENTS,
+      OPENWEAVE,
       SAMPLE_VIDEO,
       SAMPLE_AUDIO,
       SAMPLE_JPEG.format(self.product)
     ]
-    command += [path.format(TARGET_PRODUCT[self.product]) for path in SIM_SYSTEM]
+    command += [path.format(TARGET_PRODUCT[self.product] + "-" + BUILD_VARIANT) for path in SIM_SYSTEM]
     command += SIM_LIB
     returncode, _, stderr = self.exec_subprocess(command)
     if returncode != 0:
diff --git a/cq/scripts/tools/run_recipe.py b/cq/scripts/tools/run_recipe.py
index 8cd403c..5fd21cb 100755
--- a/cq/scripts/tools/run_recipe.py
+++ b/cq/scripts/tools/run_recipe.py
@@ -550,7 +550,9 @@
 
 
 def print_command(command):
-  return sys.__stdout__.write(json.dumps(command) + '\n')
+  length = sys.__stdout__.write(json.dumps(command) + '\n')
+  sys.__stdout__.flush()
+  return length
 
 
 class JsonIOWrapper(io.TextIOBase):
@@ -833,6 +835,7 @@
       'issue': int(main_patch['change_number']),
       'patch_project': main_patch['project'],
       'patchset': int(main_patch['patch_number']),
+      'patch_branch': main_patch['target_branch'],
   }