Unverified commit cf7dbbf6 authored by Peter Scheibel, committed by GitHub

Remove DB conversion of old index.yaml (#15298)

Removed the code that converted the old index.yaml format into
index.json. Since the change happened in #2189, it should be
considered safe to drop this (untested) code.
parent 3ea0e915
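For context before the diff: the dropped fallback worked roughly as sketched below, preferring index.json, parsing a legacy index.yaml when only that exists, and rewriting it as JSON. This is an illustrative reconstruction, not the removed code; the helper name load_index and the use of plain json/PyYAML (rather than Spack's spack.util wrappers and vendored ruamel.yaml) are assumptions made for brevity.

    # Hypothetical sketch of the removed yaml->json migration, for illustration.
    import json
    import os

    import yaml  # PyYAML stands in for Spack's vendored ruamel.yaml


    def load_index(db_dir):
        """Prefer index.json; fall back to a legacy index.yaml if present."""
        json_path = os.path.join(db_dir, 'index.json')
        yaml_path = os.path.join(db_dir, 'index.yaml')

        if os.path.isfile(json_path):
            with open(json_path) as f:
                return json.load(f)

        if os.path.isfile(yaml_path):
            # Legacy path: parse the YAML index, then persist it as JSON
            # so the next read takes the fast path.
            with open(yaml_path) as f:
                data = yaml.safe_load(f)
            with open(json_path, 'w') as f:
                json.dump(data, f)
            return data

        return None  # caller falls back to a full reindex of the install tree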
@@ -18,32 +18,27 @@
 as the authoritative database of packages in Spack. This module
 provides a cache and a sanity checking mechanism for what is in the
 filesystem.
 """
+import contextlib
 import datetime
-import time
 import os
-import sys
 import socket
-import contextlib
-from six import string_types
-from six import iteritems
-from ruamel.yaml.error import MarkedYAMLError, YAMLError
+import sys
+import time
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
+import six
 
-import spack.store
 import spack.repo
 import spack.spec
+import spack.store
 import spack.util.lock as lk
-import spack.util.spack_yaml as syaml
 import spack.util.spack_json as sjson
+from llnl.util.filesystem import mkdirp
 from spack.directory_layout import DirectoryLayoutError
 from spack.error import SpackError
+from spack.filesystem_view import YamlFilesystemView
+from spack.util.crypto import bit_length
 from spack.version import Version
 
 # TODO: Provide an API automatically retyring a build after detecting and
@@ -284,28 +279,20 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
         exist. This is the ``db_dir``.
 
         The Database will attempt to read an ``index.json`` file in
-        ``db_dir``. If it does not find one, it will fall back to read
-        an ``index.yaml`` if one is present. If that does not exist, it
-        will create a database when needed by scanning the entire
-        Database root for ``spec.yaml`` files according to Spack's
-        ``DirectoryLayout``.
+        ``db_dir``. If that does not exist, it will create a database
+        when needed by scanning the entire Database root for ``spec.yaml``
+        files according to Spack's ``DirectoryLayout``.
 
         Caller may optionally provide a custom ``db_dir`` parameter
         where data will be stored. This is intended to be used for
         testing the Database class.
         """
         self.root = root
 
-        if db_dir is None:
-            # If the db_dir is not provided, default to within the db root.
-            self._db_dir = os.path.join(self.root, _db_dirname)
-        else:
-            # Allow customizing the database directory location for testing.
-            self._db_dir = db_dir
+        # If the db_dir is not provided, default to within the db root.
+        self._db_dir = db_dir or os.path.join(self.root, _db_dirname)
 
         # Set up layout of database files within the db dir
-        self._old_yaml_index_path = os.path.join(self._db_dir, 'index.yaml')
         self._index_path = os.path.join(self._db_dir, 'index.json')
         self._lock_path = os.path.join(self._db_dir, 'lock')
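The collapsed default relies on Python's short-circuiting `or`: any falsy db_dir (not only None) selects the default under the root. A small standalone illustration of the idiom; the _db_dirname value and the paths are assumptions here, and POSIX path separators are assumed.

    import os

    _db_dirname = '.spack-db'  # assumed to match the name used in database.py

    def pick_db_dir(root, db_dir=None):
        # `or` returns its first truthy operand, so None (and '' as well)
        # falls through to the default directory under the root.
        return db_dir or os.path.join(root, _db_dirname)

    assert pick_db_dir('/opt/spack') == '/opt/spack/.spack-db'
    assert pick_db_dir('/opt/spack', '/tmp/testdb') == '/tmp/testdb'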
@@ -554,7 +541,8 @@ def prefix_write_lock(self, spec):
             prefix_lock.release_write()
 
     def _write_to_file(self, stream):
-        """Write out the databsae to a JSON file.
+        """Write out the database in JSON format to the stream passed
+        as argument.
 
         This function does not do any locking or transactions.
         """
@@ -576,9 +564,8 @@ def _write_to_file(self, stream):
 
         try:
             sjson.dump(database, stream)
-        except YAMLError as e:
-            raise syaml.SpackYAMLError(
-                "error writing YAML database:", str(e))
+        except (TypeError, ValueError) as e:
+            raise sjson.SpackJSONError("error writing JSON database:", str(e))
 
     def _read_spec_from_dict(self, hash_key, installs):
         """Recursively construct a spec from a hash in a YAML database.
@@ -649,28 +636,15 @@ def _assign_dependencies(self, hash_key, installs, data):
                 spec._add_dependency(child, dtypes)
 
-    def _read_from_file(self, stream, format='json'):
-        """
-        Fill database from file, do not maintain old data
-        Translate the spec portions from node-dict form to spec form
+    def _read_from_file(self, filename):
+        """Fill database from file, do not maintain old data.
+        Translate the spec portions from node-dict form to spec form.
 
         Does not do any locking.
         """
-        if format.lower() == 'json':
-            load = sjson.load
-        elif format.lower() == 'yaml':
-            load = syaml.load
-        else:
-            raise ValueError("Invalid database format: %s" % format)
-
         try:
-            if isinstance(stream, string_types):
-                with open(stream, 'r') as f:
-                    fdata = load(f)
-            else:
-                fdata = load(stream)
-        except MarkedYAMLError as e:
-            raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
+            with open(filename, 'r') as f:
+                fdata = sjson.load(f)
         except Exception as e:
             raise CorruptDatabaseError("error parsing database:", str(e))
@@ -682,12 +656,12 @@ def check(cond, msg):
             raise CorruptDatabaseError(
                 "Spack database is corrupt: %s" % msg, self._index_path)
 
-        check('database' in fdata, "No 'database' attribute in YAML.")
+        check('database' in fdata, "no 'database' attribute in JSON DB.")
 
         # High-level file checks
         db = fdata['database']
-        check('installs' in db, "No 'installs' in YAML DB.")
-        check('version' in db, "No 'version' in YAML DB.")
+        check('installs' in db, "no 'installs' in JSON DB.")
+        check('version' in db, "no 'version' in JSON DB.")
 
         installs = db['installs']
@@ -763,7 +737,6 @@ def reindex(self, directory_layout):
         """Build database index from scratch based on a directory layout.
 
         Locks the DB if it isn't locked already.
         """
-
         if self.is_upstream:
             raise UpstreamDatabaseLockingError(
@@ -927,7 +900,6 @@ def _write(self, type, value, traceback):
         after the start of the next transaction, when it read from disk again.
 
         This routine does no locking.
         """
-
        # Do not write if exceptions were raised
        if type is not None:
@@ -952,35 +924,23 @@ def _read(self):
         """Re-read Database from the data in the set location.
 
         This does no locking, with one exception: it will automatically
-        migrate an index.yaml to an index.json if possible. This requires
-        taking a write lock.
+        try to regenerate a missing DB if local. This requires taking a
+        write lock.
         """
         if os.path.isfile(self._index_path):
-            # Read from JSON file if a JSON database exists
-            self._read_from_file(self._index_path, format='json')
-
-        elif os.path.isfile(self._old_yaml_index_path):
-            if (not self.is_upstream) and os.access(
-                    self._db_dir, os.R_OK | os.W_OK):
-                # if we can write, then read AND write a JSON file.
-                self._read_from_file(self._old_yaml_index_path, format='yaml')
-                with lk.WriteTransaction(self.lock):
-                    self._write(None, None, None)
-            else:
-                # Read chck for a YAML file if we can't find JSON.
-                self._read_from_file(self._old_yaml_index_path, format='yaml')
-
-        else:
-            if self.is_upstream:
-                raise UpstreamDatabaseLockingError(
-                    "No database index file is present, and upstream"
-                    " databases cannot generate an index file")
-            # The file doesn't exist, try to traverse the directory.
-            # reindex() takes its own write lock, so no lock here.
-            with lk.WriteTransaction(self.lock):
-                self._write(None, None, None)
-            self.reindex(spack.store.layout)
+            # Read from file if a database exists
+            self._read_from_file(self._index_path)
+            return
+        elif self.is_upstream:
+            raise UpstreamDatabaseLockingError(
+                "No database index file is present, and upstream"
+                " databases cannot generate an index file")
 
+        # The file doesn't exist, try to traverse the directory.
+        # reindex() takes its own write lock, so no lock here.
+        with lk.WriteTransaction(self.lock):
+            self._write(None, None, None)
+        self.reindex(spack.store.layout)
 
     def _add(
         self,
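After the YAML branch is removed, _read has exactly three outcomes: read an existing index.json, refuse when an upstream database has no index, or rebuild the index from the install tree. A simplified standalone sketch of that control flow, with locking elided and the callables standing in for Spack's own methods:

    import os


    def read_or_rebuild(index_path, is_upstream, read_index, reindex):
        """Mirror of the new Database._read decision structure (a sketch)."""
        if os.path.isfile(index_path):
            read_index(index_path)  # common case: the index already exists
            return
        if is_upstream:
            # Upstream databases are read-only, so we must not try to
            # generate an index on their behalf.
            raise RuntimeError('no index file, and upstream databases '
                               'cannot generate one')
        reindex()  # local database: rebuild the index from spec.yaml files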
@@ -1060,7 +1020,9 @@ def _add(
         )
 
         # Connect dependencies from the DB to the new copy.
-        for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
+        for name, dep in six.iteritems(
+                spec.dependencies_dict(_tracked_deps)
+        ):
             dkey = dep.spec.dag_hash()
             upstream, record = self.query_by_spec_hash(dkey)
             new_spec._add_dependency(record.spec, dep.deptypes)
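six.iteritems is the Python 2/3 compatibility shim used here: it dispatches to dict.iteritems() on Python 2 and dict.items() on Python 3, so the loop never materializes an intermediate list on Python 2. For example (the dictionary contents below are made up):

    import six

    deps = {'zlib': ('link',), 'cmake': ('build',)}

    # Equivalent to deps.iteritems() on Python 2 and deps.items() on Python 3.
    for name, deptypes in six.iteritems(deps):
        print(name, deptypes)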
@@ -1133,8 +1095,7 @@ def _increment_ref_count(self, spec):
         rec.ref_count += 1
 
     def _remove(self, spec):
-        """Non-locking version of remove(); does real work.
-        """
+        """Non-locking version of remove(); does real work."""
         key = self._get_matching_spec_key(spec)
         rec = self._data[key]
...