refactor
kostmo committed Nov 12, 2024
1 parent 5f2ae54 commit e5383a6
Showing 4 changed files with 86 additions and 78 deletions.
@@ -1,15 +1,17 @@
{-# LANGUAGE OverloadedStrings #-}

-- |
-- SPDX-License-Identifier: BSD-3-Clause
--
-- Types strictly for debugging structure recognition via the web interface
module Swarm.Game.Scenario.Topography.Structure.Recognition.Log where

import Data.Aeson
import Data.Int (Int32)
import Data.IntSet.NonEmpty (NEIntSet)
import Data.List.NonEmpty (NonEmpty)
import Data.List.NonEmpty qualified as NE
import Data.Text (Text)
import Data.Text qualified as T
import GHC.Generics (Generic)
import Linear (V2)
import Servant.Docs (ToSample)
import Servant.Docs qualified as SD
import Swarm.Game.Location (Location)
@@ -31,6 +33,10 @@ data OrientedStructure = OrientedStructure
distillLabel :: StructureWithGrid b a -> OrientedStructure
distillLabel swg = OrientedStructure (getName $ originalDefinition swg) (rotatedTo swg)

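-- | Joins the distinct names of all structures that reference a shared row,
-- separated by slashes. Illustration only (hypothetical names): if the
-- referencing rows come from structures named "pond", "pond", and "lake",
-- the rendered text is "pond/lake".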
renderSharedNames :: ConsolidatedRowReferences b a -> Text
renderSharedNames =
T.intercalate "/" . NE.toList . NE.nub . NE.map (getName . originalDefinition . wholeStructure) . referencingRows

newtype EntityKeyedFinder = EntityKeyedFinder
{ searchOffsets :: InspectionOffsets
}
@@ -73,39 +79,10 @@ data WrongRecurrenceCountExplanation = WrongRecurrenceCountExplanation
}
deriving (Generic, ToJSON)

-- | The located occurrences of a specific contiguous chunk of entities.
-- Note that an identical chunk may recur more than once in a structure row.
-- This record represents all of the recurrences of one such chunk.
--
-- Any different chunks contained within a row will be described by
-- their own instance of this record.
--
-- Note: By virtue of the searching algorithm, these indices
-- are expected to automatically be in sorted order
data FoundAndExpectedChunkPositions = FoundAndExpectedChunkPositions
{ foundPositions :: NEIntSet
, expectedPositions :: NEIntSet
}
deriving (Generic, ToJSON)

data FoundRowFromChunk a = FoundRowFromChunk
{ chunkOffsetFromSearchBorder :: Int
, rowIndexWithinStructure :: Int32
, structurePositionOffset :: V2 Int32
, chunkStructure :: a
}
deriving (Functor, Generic, ToJSON)

data ChunkedRowMatch a e = ChunkedRowMatch
{ positionsComparison :: [(FoundAndExpectedChunkPositions, NonEmpty e)]
, foundChunkRow :: FoundRowFromChunk a
}
deriving (Functor, Generic, ToJSON)

data SearchLog e
= FoundParticipatingEntity (ParticipatingEntity e)
| StructureRemoved OriginalName
| FoundCompleteStructureCandidates [OrientedStructure]
| FoundCompleteStructureCandidates [(OrientedStructure, Cosmic Location)]
| -- | This is actually used internally as a (Map (NonEmpty e) (NonEmpty Int)),
-- but the requirements of Functor force us to invert the mapping,
-- since a derived Functor cannot map over the keys of a Map
FoundPiecewiseChunks [(NonEmpty Int, NonEmpty e)]
@@ -54,14 +54,14 @@ mkOffsets pos (RowWidth w) =
mkEntityLookup ::
(Hashable a, Eq a) =>
[StructureWithGrid b a] ->
HM.HashMap a (AutomatonNewInfo b a)
HM.HashMap a (AutomatonInfo b a)
mkEntityLookup grids =
HM.map mkRowAutomatons rowsByEntityParticipation
where
-- Produces an automaton to evaluate whenever a given entity
-- is encountered.
mkRowAutomatons neList =
AutomatonNewInfo bounds $
AutomatonInfo bounds $
PiecewiseRecognition smPiecewise extractedChunksForLookup
where
bounds = sconcat $ NE.map expandedOffsets neList
@@ -70,8 +70,6 @@ mkEntityLookup grids =
HS.fromList . concat . NE.toList $
NE.map (map chunkContents . contiguousChunks) neList

-- TODO: These should be grouped by
-- row content to avoid redundant checks
extractedChunksForLookup = NE.map f neList
where
f x = RowChunkMatchingReference (myRow x) (mkRightMap x)
@@ -91,6 +89,13 @@ mkEntityLookup grids =
binTuplesHM
. map (myEntity &&& id)
. concatMap explodeRowEntities
$ structureRowsByContent

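-- Rows with identical content are grouped across all structures, so each
-- distinct row content needs to be matched against the world only once;
-- every structure row sharing that content is retained inside the
-- resulting 'ConsolidatedRowReferences'.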
structureRowsByContent =
map (\(x, y) -> ConsolidatedRowReferences x y . gridWidth . wholeStructure $ NE.head y)
. HM.toList
. binTuplesHM
. map (rowContent &&& id)
$ allStructureRows grids

getContiguousChunks :: [Maybe a] -> [PositionedChunk a]
@@ -109,9 +114,9 @@ getContiguousChunks rowMembers =
-- are dropped but accounted for positionally when indexing the columns.
explodeRowEntities ::
(Hashable a, Eq a) =>
StructureRow b a ->
ConsolidatedRowReferences b a ->
[SingleRowEntityOccurrences b a]
explodeRowEntities annotatedRow@(StructureRow _ _ rowMembers) =
explodeRowEntities annotatedRow@(ConsolidatedRowReferences rowMembers _ rw) =
map f $ HM.toList $ binTuplesHM unconsolidatedEntityOccurrences
where
chunks = getContiguousChunks rowMembers
@@ -130,8 +135,8 @@ explodeRowEntities annotatedRow@(StructureRow _ _ rowMembers) =
zipWith (\idx -> fmap (PositionWithinRow idx annotatedRow,)) [0 ..] rowMembers

deriveEntityOffsets :: PositionWithinRow b a -> InspectionOffsets
deriveEntityOffsets (PositionWithinRow pos r) =
mkOffsets pos $ gridWidth $ wholeStructure r
deriveEntityOffsets (PositionWithinRow pos _) =
mkOffsets pos rw

-- * Util

@@ -73,9 +73,7 @@ entityModified entLoader modification cLoc recognizer = do
stateRevision <- case HM.lookup newEntity entLookup of
Nothing -> return oldRecognitionState
Just finder -> do
let logFinder f =
EntityKeyedFinder
(f ^. inspectionOffsets2)
let logFinder f = EntityKeyedFinder (f ^. inspectionOffsets)
msg =
FoundParticipatingEntity $
ParticipatingEntity newEntity $
@@ -170,17 +168,22 @@ checkChunksCombination

let candidateExpected = concatMap NE.toList candidateExpectedLists
tell . pure . ChunksMatchingExpected $
map (modifyChunkedRowMatch $ fmap $ getName . originalDefinition . wholeStructure) candidateExpected
map (modifyChunkedRowMatch $ fmap renderSharedNames) candidateExpected

let structurePositionsToCheck = map mkFoundStructure candidateExpected
let structurePositionsToCheck = concatMap mkFoundStructures candidateExpected
filterM validateIntactness2d structurePositionsToCheck
where
foundRowChunksLookup = fmap NEIS.fromList $ binTuplesHM $ map (pVal &&& pIndex) candidatesChunked

mkFoundStructure x =
FoundStructure
(wholeStructure $ chunkStructure $ foundChunkRow x)
(cLoc `offsetBy` structurePositionOffset (foundChunkRow x))
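-- A matched row may be shared by several structures (and by several rows
-- within one structure), so each chunk match fans out into one candidate
-- 'FoundStructure' per referencing row.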
mkFoundStructures x =
NE.toList $ NE.map mkFoundStructure . referencingRows . chunkStructure $ foundChunkRow x
where
mkFoundStructure r =
FoundStructure
(wholeStructure r)
(cLoc `offsetBy` theOffset)
where
theOffset = V2 (horizontalStructPos $ foundChunkRow x) (rowIndex r)

validateIntactness2d fs = do
intactness <- lift $ ensureStructureIntact entLoader fs
@@ -192,7 +195,7 @@ checkChunksCombination
return intactness

subsetChecker (RowChunkMatchingReference r chunkPositionMap) =
left (ChunkMatchFailureReason $ getName . originalDefinition . wholeStructure $ r) $ do
left (ChunkMatchFailureReason $ renderSharedNames r) $ do
let isKeysSubset = referenceChunksKeys `HS.isSubsetOf` foundChunksKeys
unless isKeysSubset . Left $
NoKeysSubset $
@@ -225,7 +228,7 @@ checkChunksCombination
mkRowMatch theIntersection rowOffset =
ChunkedRowMatch
(map swap $ HM.toList theIntersection)
(FoundRowFromChunk rowOffset (rowIndex r) (V2 horizontalStructurePosition (rowIndex r)) r)
(FoundRowFromChunk rowOffset horizontalStructurePosition r)
where
horizontalStructurePosition = fromIntegral rowOffset + getMin (startOffset horizontalOffsets)

@@ -241,10 +244,10 @@ registerRowMatches ::
(Monad s, Hashable a, Eq b) =>
GenericEntLocator s a ->
Cosmic Location ->
AutomatonNewInfo b a ->
AutomatonInfo b a ->
RecognitionState b a ->
WriterT [SearchLog a] s (RecognitionState b a)
registerRowMatches entLoader cLoc (AutomatonNewInfo horizontalOffsets pwMatcher) rState = do
registerRowMatches entLoader cLoc (AutomatonInfo horizontalOffsets pwMatcher) rState = do
entitiesRow <-
lift $
getWorldRow
@@ -256,7 +259,7 @@ registerRowMatches entLoader cLoc (AutomatonNewInfo horizontalOffsets pwMatcher)

tell $ pure $ StartSearchAt cLoc horizontalOffsets

tell . pure . ExpectedChunks $ NE.map (HM.keys . confirmationMap) pwMaps
tell . pure . ExpectedChunks $ NE.map (HM.keys . confirmationMap) rowChunkReferences

let candidatesChunked = findAll pwSM entitiesRow

@@ -265,14 +268,14 @@ registerRowMatches entLoader cLoc (AutomatonNewInfo horizontalOffsets pwMatcher)
registerStructureMatches intactStructuresLists rState
where
registry = rState ^. foundStructures
PiecewiseRecognition pwSM pwMaps = pwMatcher
PiecewiseRecognition pwSM rowChunkReferences = pwMatcher

checkCombo =
checkChunksCombination
entLoader
cLoc
horizontalOffsets
pwMaps
rowChunkReferences

-- | We only have to do this once, for the "smallest" chunk occurrence size discrepancy.
-- All subsequent chunks will merely filter on this initial set.
@@ -316,5 +319,5 @@ registerStructureMatches unrankedCandidates oldState = do
-- Sorted by decreasing order of preference.
rankedCandidates = sortOn Down unrankedCandidates

getStructInfo (FoundStructure swg _) = distillLabel swg
getStructInfo (FoundStructure swg loc) = (distillLabel swg, loc)
newMsg = FoundCompleteStructureCandidates $ map getStructInfo rankedCandidates
@@ -24,6 +24,7 @@ import Data.Aeson (ToJSON)
import Data.Function (on)
import Data.HashMap.Strict (HashMap)
import Data.Int (Int32)
import Data.IntSet.NonEmpty (NEIntSet)
import Data.List.NonEmpty (NonEmpty)
import Data.Map (Map)
import Data.Maybe (catMaybes)
@@ -81,14 +82,14 @@ type SymbolSequence a = [AtomicKeySymbol a]
data PositionWithinRow b a = PositionWithinRow
{ _position :: Int32
-- ^ horizontal index of the entity within the row
, structureRow :: StructureRow b a
, structureRow :: ConsolidatedRowReferences b a
}

-- | A chunkified version of a structure row.
-- Each unique structure row will need to test one of these
-- against the world row being examined.
data RowChunkMatchingReference b a = RowChunkMatchingReference
{ locatableRow :: StructureRow b a
{ locatableRows :: ConsolidatedRowReferences b a
, confirmationMap :: HashMap (NonEmpty a) (NonEmpty Int)
}

@@ -115,7 +116,7 @@ data PositionedChunk a = PositionedChunk
--
-- this record will contain two entries in its 'entityOccurrences' field.
data SingleRowEntityOccurrences b a = SingleRowEntityOccurrences
{ myRow :: StructureRow b a
{ myRow :: ConsolidatedRowReferences b a
, myEntity :: a
, contiguousChunks :: [PositionedChunk a]
, expandedOffsets :: InspectionOffsets
@@ -146,6 +147,14 @@ data StructureRow b a = StructureRow
, rowContent :: SymbolSequence a
}

-- | Represents all rows across all structures that share
-- a particular row content
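--
-- Illustration only (hypothetical structures): if a "pond" and a "lake"
-- structure each contain a row of two water cells, a single
-- 'ConsolidatedRowReferences' is built for that row content, and its
-- 'referencingRows' holds one 'StructureRow' from each structure.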
data ConsolidatedRowReferences b a = ConsolidatedRowReferences
{ sharedRowContent :: SymbolSequence a
, referencingRows :: NonEmpty (StructureRow b a)
, theRowWidth :: RowWidth
}

-- | This wrapper facilitates naming the original structure
-- (i.e. the "payload" for recognition)
-- for the purpose of both UI display and internal uniqueness,
@@ -203,35 +212,21 @@ instance Semigroup InspectionOffsets where
InspectionOffsets l1 r1 <> InspectionOffsets l2 r2 =
InspectionOffsets (l1 <> l2) (r1 <> r2)

-- | Each automaton shall be initialized to recognize
-- a certain subset of structure rows, that may either
-- all be within one structure, or span multiple structures.
data AutomatonInfo en k v = AutomatonInfo
data AutomatonInfo v k = AutomatonInfo
{ _inspectionOffsets :: InspectionOffsets
, _automaton :: StateMachine k v
, _searchPairs :: NonEmpty ([k], v)
-- ^ these are the tuples input to the 'makeStateMachine' function,
-- for debugging purposes.
}
deriving (Generic)

makeLenses ''AutomatonInfo

data AutomatonNewInfo v k = AutomatonNewInfo
{ _inspectionOffsets2 :: InspectionOffsets
, piecewiseStuff :: PiecewiseRecognition v k
}
deriving (Generic)

makeLenses ''AutomatonNewInfo
makeLenses ''AutomatonInfo

-- | The complete set of data needed to identify applicable
-- structures, based on a just-placed entity.
data RecognizerAutomatons b a = RecognizerAutomatons
{ _originalStructureDefinitions :: Map OriginalName (StructureInfo b a)
-- ^ all of the structures that shall participate in automatic recognition.
-- This list is used only by the UI and by the 'Floorplan' command.
, _automatonsByEntity :: HashMap a (AutomatonNewInfo b a)
, _automatonsByEntity :: HashMap a (AutomatonInfo b a)
}
deriving (Generic)

@@ -248,6 +243,34 @@ data FoundStructure b a = FoundStructure
}
deriving (Eq)

data FoundRowFromChunk a = FoundRowFromChunk
{ chunkOffsetFromSearchBorder :: Int
, horizontalStructPos :: Int32
, chunkStructure :: a
}
deriving (Functor, Generic, ToJSON)

-- | The located occurrences of a specific contiguous chunk of entities.
-- Note that an identical chunk may recur more than once in a structure row.
-- This record represents all of the recurrences of one such chunk.
--
-- Any different chunks contained within a row will be described by
-- their own instance of this record.
--
-- Note: By virtue of the searching algorithm, these indices
-- are expected to automatically be in sorted order
data FoundAndExpectedChunkPositions = FoundAndExpectedChunkPositions
{ foundPositions :: NEIntSet
, expectedPositions :: NEIntSet
}
deriving (Generic, ToJSON)

data ChunkedRowMatch a e = ChunkedRowMatch
{ positionsComparison :: [(FoundAndExpectedChunkPositions, NonEmpty e)]
, foundChunkRow :: FoundRowFromChunk a
}
deriving (Functor, Generic, ToJSON)

-- | Ordering is by increasing preference between simultaneously
-- completed structures.
-- The preference heuristic is for: