Merged
Changes from 3 commits
sdk/python/feast/infra/online_stores/datastore.py (46 changes: 41 additions & 5 deletions)
@@ -12,8 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import logging
from datetime import datetime
from multiprocessing.pool import ThreadPool
from queue import Queue
from threading import Lock, Thread
from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple

from pydantic import PositiveInt, StrictStr
Expand All @@ -33,6 +36,8 @@
from feast.repo_config import FeastConfigBaseModel, RepoConfig
from feast.usage import log_exceptions_and_usage, tracing_span

LOGGER = logging.getLogger(__name__)

try:
from google.auth.exceptions import DefaultCredentialsError
from google.cloud import datastore
@@ -262,15 +267,46 @@ def online_read(
def _delete_all_values(client, key):
"""
Delete all data under the key path in datastore.

Creates and uses a queue of lists of entity keys, which are batch deleted
by multiple threads.
"""

class AtomicCounter(object):
# for tracking how many deletions have already occurred; not used outside this method
def __init__(self):
self.value = 0
self.lock = Lock()

def increment(self):
with self.lock:
self.value += 1

BATCH_SIZE = 500 # Dec 2021: delete_multi has a max size of 500
Review thread on the BATCH_SIZE line:

Member: Nit: is there a doc we can link to in case this changes in the future?

Contributor Author: I just went looking and couldn't find an obvious place. If we try to query with more than 500, the server responds:

google.api_core.exceptions.InvalidArgument: 400 cannot write more than 500 entities in a single call
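As an aside, not part of the diff: since the ceiling only surfaces as a server-side error, callers have to keep each delete_multi call at or under 500 keys themselves. A minimal, hypothetical chunking sketch (chunk_keys and DATASTORE_WRITE_LIMIT are illustrative names, not Feast or Datastore APIs):

# Hypothetical illustration only; not part of this PR.
# delete_multi rejects calls that write more than 500 entities, so keys are
# split into batches of at most 500 before each call.
from typing import Iterable, Iterator, List

DATASTORE_WRITE_LIMIT = 500

def chunk_keys(keys: Iterable, size: int = DATASTORE_WRITE_LIMIT) -> Iterator[List]:
    batch: List = []
    for key in keys:
        batch.append(key)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch

# for batch in chunk_keys(all_keys):
#     client.delete_multi(batch)  # each call stays at or under the 500-entity ceiling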

NUM_THREADS = 3
deletion_queue = Queue()
status_info_counter = AtomicCounter()

def worker(shared_counter):
while True:
client.delete_multi(deletion_queue.get())
shared_counter.increment()
LOGGER.debug(
f"batch deletions completed: {shared_counter.value} ({shared_counter.value * BATCH_SIZE} total entries) & outstanding queue size: {deletion_queue.qsize()}"
)
deletion_queue.task_done()

for _ in range(NUM_THREADS):
Thread(target=worker, args=(status_info_counter,), daemon=True).start()

+    query = client.query(kind="Row", ancestor=key)
     while True:
-        query = client.query(kind="Row", ancestor=key)
-        entities = list(query.fetch(limit=1000))
+        entities = list(query.fetch(limit=BATCH_SIZE))
         if not entities:
-            return
+            break
+        deletion_queue.put([entity.key for entity in entities])

-        for entity in entities:
-            client.delete(entity.key)
+    deletion_queue.join()


def _initialize_client(
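As a closing aside, not part of the diff: the coordination that the new _delete_all_values relies on can be sketched on its own. Daemon worker threads pull batches of keys off a Queue, call task_done() after each batch, and the producer blocks on join() until every queued batch has been processed. A minimal, self-contained sketch with a stand-in delete_batch function (hypothetical; nothing here touches Datastore):

# Standalone sketch of the Queue/worker pattern used by _delete_all_values above.
# delete_batch stands in for client.delete_multi; this runs without any GCP setup.
from queue import Queue
from threading import Thread

NUM_THREADS = 3

def delete_batch(keys):
    print(f"deleted {len(keys)} keys")  # stand-in for client.delete_multi(keys)

def worker(q: Queue) -> None:
    while True:
        keys = q.get()  # block until a batch of keys is available
        delete_batch(keys)
        q.task_done()   # tell q.join() this batch has been fully processed

work_queue: Queue = Queue()
for _ in range(NUM_THREADS):
    # daemon=True: the workers loop forever, but they will not keep the process alive
    Thread(target=worker, args=(work_queue,), daemon=True).start()

for batch in ([1, 2, 3], [4, 5], [6]):
    work_queue.put(batch)

work_queue.join()  # returns only after task_done() has been called once per put()

Because join() counts task_done() calls against put() calls, the producer can return cleanly while the daemon workers simply die with the process.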