Team Fortress 2 Source Code as on 22/4/2020
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

986 lines
26 KiB

  1. //========= Copyright Valve Corporation, All rights reserved. ============//
  2. //
  3. // Purpose:
  4. //
  5. // $NoKeywords: $
  6. //=============================================================================//
  7. #include "cbase.h"
  8. #include "ai_speech.h"
  9. #include "game.h"
  10. #include "engine/IEngineSound.h"
  11. #include "KeyValues.h"
  12. #include "ai_basenpc.h"
  13. #include "AI_Criteria.h"
  14. #include "isaverestore.h"
  15. #include "sceneentity.h"
  16. // memdbgon must be the last include file in a .cpp file!!!
  17. #include <tier0/memdbgon.h>
// Compile-time switch for speech debug spew and the ai_debug_speech ConVar.
#define DEBUG_AISPEECH 1
#ifdef DEBUG_AISPEECH
// "ai_debug_speech 1" enables verbose speech logging (see CAI_Expresser::SpeechMsg).
ConVar ai_debug_speech( "ai_debug_speech", "0" );
#define DebuggingSpeech() ai_debug_speech.GetBool()
#else
// When debugging is compiled out, SpeechMsg collapses to a no-op.
inline void SpeechMsg( ... ) {}
#define DebuggingSpeech() (false)
#endif
extern ConVar rr_debugresponses;
//-----------------------------------------------------------------------------
// Global talk semaphores: friendly NPCs share one token, foes share another,
// so only one talker per side speaks at a time (see GetMySpeechSemaphore).
CAI_TimedSemaphore g_AIFriendliesTalkSemaphore;
CAI_TimedSemaphore g_AIFoesTalkSemaphore;
  30. ConceptHistory_t::~ConceptHistory_t()
  31. {
  32. delete response;
  33. response = NULL;
  34. }
  35. ConceptHistory_t::ConceptHistory_t( const ConceptHistory_t& src )
  36. {
  37. timeSpoken = src.timeSpoken;
  38. response = NULL;
  39. if ( src.response )
  40. {
  41. response = new AI_Response( *src.response );
  42. }
  43. }
  44. ConceptHistory_t& ConceptHistory_t::operator =( const ConceptHistory_t& src )
  45. {
  46. if ( this != &src )
  47. {
  48. timeSpoken = src.timeSpoken;
  49. delete response;
  50. response = NULL;
  51. if ( src.response )
  52. {
  53. response = new AI_Response( *src.response );
  54. }
  55. }
  56. return *this;
  57. }
// Save/restore layout for ConceptHistory_t. The response pointer is NOT listed
// here — it is serialized manually by CConceptHistoriesDataOps below.
BEGIN_SIMPLE_DATADESC( ConceptHistory_t )
	DEFINE_FIELD( timeSpoken, FIELD_TIME ), // Relative to server time
	// DEFINE_EMBEDDED( response, FIELD_??? ), // This is manually saved/restored by the ConceptHistory saverestore ops below
END_DATADESC()
//-----------------------------------------------------------------------------
// Custom save/restore ops for CAI_Expresser's concept-history dictionary.
// Each entry is written as one StartBlock/EndBlock span: element name, the
// ConceptHistory_t datadesc fields, then (optionally) the AI_Response blob.
//-----------------------------------------------------------------------------
class CConceptHistoriesDataOps : public CDefSaveRestoreOps
{
public:
	// Serialize every entry of the CUtlDict in index order.
	virtual void Save( const SaveRestoreFieldInfo_t &fieldInfo, ISave *pSave )
	{
		CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
		int count = ch->Count();
		pSave->WriteInt( &count );
		for ( int i = 0 ; i < count; i++ )
		{
			ConceptHistory_t *pHistory = &(*ch)[ i ];

			pSave->StartBlock();
			{
				// Write element name
				pSave->WriteString( ch->GetElementName( i ) );
				// Write data
				pSave->WriteAll( pHistory );
				// Write response blob
				bool hasresponse = !!pHistory->response;
				pSave->WriteBool( &hasresponse );
				if ( hasresponse )
				{
					pSave->WriteAll( pHistory->response );
				}
				// TODO: Could blat out pHistory->criteria pointer here, if it's needed
			}
			pSave->EndBlock();
		}
	}

	// Rebuild the dictionary from the stream written by Save() above.
	// Must read in exactly the same order: name, fields, response flag, blob.
	virtual void Restore( const SaveRestoreFieldInfo_t &fieldInfo, IRestore *pRestore )
	{
		CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
		int count = pRestore->ReadInt();
		Assert( count >= 0 );
		for ( int i = 0 ; i < count; i++ )
		{
			char conceptname[ 512 ];
			conceptname[ 0 ] = 0;
			ConceptHistory_t history;

			pRestore->StartBlock();
			{
				pRestore->ReadString( conceptname, sizeof( conceptname ), 0 );
				pRestore->ReadAll( &history );

				bool hasresponse = false;
				pRestore->ReadBool( &hasresponse );
				if ( hasresponse )
				{
					// Heap-allocate a response for this local; Insert() below
					// deep-copies it via ConceptHistory_t's copy ctor, and the
					// local's dtor frees this one on scope exit.
					history.response = new AI_Response();
					pRestore->ReadAll( history.response );
				}
			}
			pRestore->EndBlock();

			// TODO: Could restore pHistory->criteria pointer here, if it's needed

			// Add to utldict
			if ( conceptname[0] != 0 )
			{
				ch->Insert( conceptname, history );
			}
			else
			{
				Assert( !"Error restoring ConceptHistory_t, discarding!" );
			}
		}
	}

	// Nothing to do: the dict is a value member, not something we allocate.
	virtual void MakeEmpty( const SaveRestoreFieldInfo_t &fieldInfo )
	{
	}

	virtual bool IsEmpty( const SaveRestoreFieldInfo_t &fieldInfo )
	{
		CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
		return ch->Count() == 0 ? true : false;
	}
};

// Shared singleton used by CAI_Expresser's datadesc below.
CConceptHistoriesDataOps g_ConceptHistoriesSaveDataOps;
//-----------------------------------------------------------------------------
//
// CLASS: CAI_Expresser
//
// Save/restore layout. The concept-history dict goes through the custom ops
// above; the sink pointer is reconnected on load rather than saved.
//-----------------------------------------------------------------------------
BEGIN_SIMPLE_DATADESC( CAI_Expresser )
	// m_pSink (reconnected on load)
	// DEFINE_FIELD( m_pOuter, CHandle < CBaseFlex > ),
	DEFINE_CUSTOM_FIELD( m_ConceptHistories, &g_ConceptHistoriesSaveDataOps ),
	DEFINE_FIELD( m_flStopTalkTime, FIELD_TIME ),
	DEFINE_FIELD( m_flStopTalkTimeWithoutDelay, FIELD_TIME ),
	DEFINE_FIELD( m_flBlockedTalkTime, FIELD_TIME ),
	DEFINE_FIELD( m_voicePitch, FIELD_INTEGER ),
	DEFINE_FIELD( m_flLastTimeAcceptedSpeak, FIELD_TIME ),
END_DATADESC()
  150. //-------------------------------------
  151. bool CAI_Expresser::SemaphoreIsAvailable( CBaseEntity *pTalker )
  152. {
  153. if ( !GetSink()->UseSemaphore() )
  154. return true;
  155. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( pTalker->MyNPCPointer() );
  156. return (pSemaphore ? pSemaphore->IsAvailable( pTalker ) : true);
  157. }
  158. //-------------------------------------
  159. float CAI_Expresser::GetSemaphoreAvailableTime( CBaseEntity *pTalker )
  160. {
  161. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( pTalker->MyNPCPointer() );
  162. return (pSemaphore ? pSemaphore->GetReleaseTime() : 0);
  163. }
  164. //-------------------------------------
  165. int CAI_Expresser::GetVoicePitch() const
  166. {
  167. return m_voicePitch + random->RandomInt(0,3);
  168. }
#ifdef DEBUG
// Count of live expressers; used by ~CAI_Expresser to sanity-check that no
// non-talker is left holding a speech semaphore.
static int g_nExpressers;
#endif

// Construct an expresser bound to 'pOuter'. The sink starts NULL and is
// attached later (it is also reconnected on save/restore, per the datadesc).
CAI_Expresser::CAI_Expresser( CBaseFlex *pOuter )
 :	m_pOuter( pOuter ),
	m_pSink( NULL ),
	m_flStopTalkTime( 0 ),
	m_flLastTimeAcceptedSpeak( 0 ),
	m_flBlockedTalkTime( 0 ),
	m_flStopTalkTimeWithoutDelay( 0 ),
	m_voicePitch( 100 )
{
#ifdef DEBUG
	g_nExpressers++;
#endif
}
  185. CAI_Expresser::~CAI_Expresser()
  186. {
  187. m_ConceptHistories.Purge();
  188. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( GetOuter() );
  189. if ( pSemaphore )
  190. {
  191. if ( pSemaphore->GetOwner() == GetOuter() )
  192. pSemaphore->Release();
  193. #ifdef DEBUG
  194. g_nExpressers--;
  195. if ( g_nExpressers == 0 && pSemaphore->GetOwner() )
  196. DevMsg( 2, "Speech semaphore being held by non-talker entity\n" );
  197. #endif
  198. }
  199. }
//-----------------------------------------------------------------------------
// Debug helper (driven by the npc_speakall command below): print and dispatch
// every response the outer's response system knows about.
//-----------------------------------------------------------------------------
void CAI_Expresser::TestAllResponses()
{
	IResponseSystem *pResponseSystem = GetOuter()->GetResponseSystem();
	if ( pResponseSystem )
	{
		CUtlVector<AI_Response *> responses;
		pResponseSystem->GetAllResponses( &responses );
		for ( int i = 0; i < responses.Count(); i++ )
		{
			const char *szResponse = responses[i]->GetResponsePtr();
			Msg( "Response: %s\n", szResponse );
			// Empty concept: dispatch without recording it in concept history.
			SpeakDispatchResponse( "", *responses[i] );
		}
		// NOTE(review): the AI_Response pointers collected here are never
		// freed in this function — confirm ownership semantics of
		// IResponseSystem::GetAllResponses before adding cleanup.
	}
}
//-----------------------------------------------------------------------------
// Cached length of the AI_SPECIFIC_SCENE_MODIFIER prefix string, computed once
// at static-initialization time.
static const int LEN_SPECIFIC_SCENE_MODIFIER = strlen( AI_SPECIFIC_SCENE_MODIFIER );
//-----------------------------------------------------------------------------
// Purpose: Searches the response system for the best response to 'concept'.
// Input  : concept   - concept token being spoken
//          modifiers - optional "key:value,key:value" criteria string
// Output : true if a usable response was found and stored in outResponse
//-----------------------------------------------------------------------------
bool CAI_Expresser::SpeakFindResponse( AI_Response &outResponse, AIConcept_t concept, const char *modifiers /*= NULL*/ )
{
	IResponseSystem *rs = GetOuter()->GetResponseSystem();
	if ( !rs )
	{
		Assert( !"No response system installed for CAI_Expresser::GetOuter()!!!" );
		return false;
	}

	AI_CriteriaSet set;
	// Always include the concept name
	set.AppendCriteria( "concept", concept, CONCEPT_WEIGHT );

	// Always include any optional modifiers, split out of the packed string.
	if ( modifiers )
	{
		char copy_modifiers[ 255 ];
		const char *pCopy;
		char key[ 128 ] = { 0 };
		char value[ 128 ] = { 0 };

		Q_strncpy( copy_modifiers, modifiers, sizeof( copy_modifiers ) );
		pCopy = copy_modifiers;
		while( pCopy )
		{
			pCopy = SplitContext( pCopy, key, sizeof( key ), value, sizeof( value ), NULL );
			if( *key && *value )
			{
				set.AppendCriteria( key, value, CONCEPT_WEIGHT );
			}
		}
	}

	// Let our outer fill in most match criteria
	GetOuter()->ModifyOrAppendCriteria( set );

	// Append local player criteria to set, but not if this is a player doing the talking
	if ( !GetOuter()->IsPlayer() )
	{
		CBasePlayer *pPlayer = UTIL_PlayerByIndex( 1 );
		if( pPlayer )
			pPlayer->ModifyOrAppendPlayerCriteria( set );
	}

	// Now that we have a criteria set, ask for a suitable response
	bool found = rs->FindBestResponse( set, outResponse, this );

	// rr_debugresponses 3: spew the result for selected NPCs / players.
	if ( rr_debugresponses.GetInt() == 3 )
	{
		if ( ( GetOuter()->MyNPCPointer() && GetOuter()->m_debugOverlays & OVERLAY_NPC_SELECTED_BIT ) || GetOuter()->IsPlayer() )
		{
			const char *pszName = GetOuter()->IsPlayer() ?
				((CBasePlayer*)GetOuter())->GetPlayerName() : GetOuter()->GetDebugName();
			if ( found )
			{
				const char *szReponse = outResponse.GetResponsePtr();
				Warning( "RESPONSERULES: %s spoke '%s'. Found response '%s'.\n", pszName, concept, szReponse );
			}
			else
			{
				Warning( "RESPONSERULES: %s spoke '%s'. Found no matching response.\n", pszName, concept );
			}
		}
	}

	if ( !found )
		return false;

	// Reject empty response strings.
	const char *szReponse = outResponse.GetResponsePtr();
	if ( !szReponse[0] )
		return false;

	// NOTE(review): this check reads inverted — the response is *discarded*
	// when the 1-100 roll is <= the odds, so a larger odds value makes the
	// line rarer. This matches the shipped logic; confirm intent before
	// "fixing" it, since response scripts may have been tuned around it.
	if ( ( outResponse.GetOdds() < 100 ) && ( random->RandomInt( 1, 100 ) <= outResponse.GetOdds() ) )
		return false;

	return true;
}
//-----------------------------------------------------------------------------
// Purpose: Plays a response found by SpeakFindResponse, dispatching on its
//          type (sound, sentence, scene, debug print).
// Output : true if the response was actually delivered
//-----------------------------------------------------------------------------
bool CAI_Expresser::SpeakDispatchResponse( AIConcept_t concept, AI_Response& response, IRecipientFilter *filter /* = NULL */ )
{
	bool spoke = false;

	float delay = response.GetDelay();
	const char *szResponse = response.GetResponsePtr();
	soundlevel_t soundlevel = response.GetSoundLevel();

	// Already mid-speech: force-stop instanced scenes/sentences, but bail out
	// entirely if a (map-placed) scripted scene is still running the actor.
	if ( IsSpeaking() && concept[0] != 0 )
	{
		DevMsg( "SpeakDispatchResponse: Entity ( %i/%s ) already speaking, forcing '%s'\n", GetOuter()->entindex(), STRING( GetOuter()->GetEntityName() ), concept );

		// Tracker 15911: Can break the game if we stop an imported map placed lcs here, so only
		// cancel actor out of instanced scripted scenes. ywb
		RemoveActorFromScriptedScenes( GetOuter(), true /*instanced scenes only*/ );
		GetOuter()->SentenceStop();

		if ( IsRunningScriptedScene( GetOuter() ) )
		{
			DevMsg( "SpeakDispatchResponse: Entity ( %i/%s ) refusing to speak due to scene entity, tossing '%s'\n", GetOuter()->entindex(), STRING( GetOuter()->GetEntityName() ), concept );
			return false;
		}
	}

	switch ( response.GetType() )
	{
	default:
	case RESPONSE_NONE:
		break;

	case RESPONSE_SPEAK:
		if ( !response.ShouldntUseScene() )
		{
			// This generates a fake CChoreoScene wrapping the sound.txt name
			spoke = SpeakAutoGeneratedScene( szResponse, delay );
		}
		else
		{
			// Play the raw sound directly and track its duration ourselves.
			float speakTime = GetResponseDuration( response );
			GetOuter()->EmitSound( szResponse );

			DevMsg( "SpeakDispatchResponse: Entity ( %i/%s ) playing sound '%s'\n", GetOuter()->entindex(), STRING( GetOuter()->GetEntityName() ), szResponse );
			NoteSpeaking( speakTime, delay );
			spoke = true;
		}
		break;

	case RESPONSE_SENTENCE:
		// SpeakRawSentence returns the sentence index, -1 on failure.
		spoke = ( -1 != SpeakRawSentence( szResponse, delay, VOL_NORM, soundlevel ) ) ? true : false;
		break;

	case RESPONSE_SCENE:
		spoke = SpeakRawScene( szResponse, delay, &response, filter );
		break;

	case RESPONSE_RESPONSE:
		// This should have been recursively resolved already
		Assert( 0 );
		break;

	case RESPONSE_PRINT:
		// Developer-only: draw the response text above the talker's head.
		if ( g_pDeveloper->GetInt() > 0 )
		{
			Vector vPrintPos;
			GetOuter()->CollisionProp()->NormalizedToWorldSpace( Vector(0.5,0.5,1.0f), &vPrintPos );
			NDebugOverlay::Text( vPrintPos, szResponse, true, 1.5 );
			spoke = true;
		}
		break;
	}

	if ( spoke )
	{
		// Record the accept time (gates "one speak per think" in IsSpeaking/CanSpeak).
		m_flLastTimeAcceptedSpeak = gpGlobals->curtime;

		// Debug overlay of concept + response (skip if already printed above).
		if ( DebuggingSpeech() && g_pDeveloper->GetInt() > 0 && response.GetType() != RESPONSE_PRINT )
		{
			Vector vPrintPos;
			GetOuter()->CollisionProp()->NormalizedToWorldSpace( Vector(0.5,0.5,1.0f), &vPrintPos );
			NDebugOverlay::Text( vPrintPos, CFmtStr( "%s: %s", concept, szResponse ), true, 1.5 );
		}

		// Apply any response context either to the world entity (index 0) or
		// to the talker itself.
		if ( response.IsApplyContextToWorld() )
		{
			CBaseEntity *pEntity = CBaseEntity::Instance( engine->PEntityOfEntIndex( 0 ) );
			if ( pEntity )
			{
				pEntity->AddContext( response.GetContext() );
			}
		}
		else
		{
			GetOuter()->AddContext( response.GetContext() );
		}

		// Remember this concept was spoken, keeping a deep copy of the response.
		SetSpokeConcept( concept, &response );
	}

	return spoke;
}
  379. //-----------------------------------------------------------------------------
  380. // Purpose:
  381. // Input : *response -
  382. // Output : float
  383. //-----------------------------------------------------------------------------
  384. float CAI_Expresser::GetResponseDuration( AI_Response& response )
  385. {
  386. const char *szResponse = response.GetResponsePtr();
  387. switch ( response.GetType() )
  388. {
  389. default:
  390. case RESPONSE_NONE:
  391. break;
  392. case RESPONSE_SPEAK:
  393. return GetOuter()->GetSoundDuration( szResponse, STRING( GetOuter()->GetModelName() ) );
  394. case RESPONSE_SENTENCE:
  395. Assert( 0 );
  396. return 999.0f;
  397. case RESPONSE_SCENE:
  398. return GetSceneDuration( szResponse );
  399. case RESPONSE_RESPONSE:
  400. // This should have been recursively resolved already
  401. Assert( 0 );
  402. break;
  403. case RESPONSE_PRINT:
  404. return 1.0;
  405. }
  406. return 0.0f;
  407. }
  408. //-----------------------------------------------------------------------------
  409. // Purpose: Placeholder for rules based response system
  410. // Input : concept -
  411. // Output : Returns true on success, false on failure.
  412. //-----------------------------------------------------------------------------
  413. bool CAI_Expresser::Speak( AIConcept_t concept, const char *modifiers /*= NULL*/, char *pszOutResponseChosen /* = NULL*/, size_t bufsize /* = 0 */, IRecipientFilter *filter /* = NULL */ )
  414. {
  415. AI_Response response;
  416. bool result = SpeakFindResponse( response, concept, modifiers );
  417. if ( !result )
  418. return false;
  419. SpeechMsg( GetOuter(), "%s (%p) spoke %s (%f)\n", STRING(GetOuter()->GetEntityName()), GetOuter(), concept, gpGlobals->curtime );
  420. bool spoke = SpeakDispatchResponse( concept, response, filter );
  421. if ( pszOutResponseChosen )
  422. {
  423. const char *szResponse = response.GetResponsePtr();
  424. Q_strncpy( pszOutResponseChosen, szResponse, bufsize );
  425. }
  426. return spoke;
  427. }
//-----------------------------------------------------------------------------
// Purpose: Play a .vcd choreo scene by name and mark this expresser as
//          speaking for the scene's length (plus delay).
//-----------------------------------------------------------------------------
bool CAI_Expresser::SpeakRawScene( const char *pszScene, float delay, AI_Response *response, IRecipientFilter *filter /* = NULL */ )
{
	float sceneLength = GetOuter()->PlayScene( pszScene, delay, response, filter );
	if ( sceneLength > 0 )
	{
		SpeechMsg( GetOuter(), "SpeakRawScene( %s, %f) %f\n", pszScene, delay, sceneLength );

#if defined( HL2_EPISODIC ) || defined( TF_DLL )
		// Gender-expand the scene name before checking its speech events.
		char szInstanceFilename[256];
		GetOuter()->GenderExpandString( pszScene, szInstanceFilename, sizeof( szInstanceFilename ) );
		// Only mark ourselves as speaking if the scene has speech
		if ( GetSceneSpeechCount(szInstanceFilename) > 0 )
		{
			NoteSpeaking( sceneLength, delay );
		}
#else
		NoteSpeaking( sceneLength, delay );
#endif

		return true;
	}
	return false;
}
  452. // This will create a fake .vcd/CChoreoScene to wrap the sound to be played
  453. bool CAI_Expresser::SpeakAutoGeneratedScene( char const *soundname, float delay )
  454. {
  455. float speakTime = GetOuter()->PlayAutoGeneratedSoundScene( soundname );
  456. if ( speakTime > 0 )
  457. {
  458. SpeechMsg( GetOuter(), "SpeakAutoGeneratedScene( %s, %f) %f\n", soundname, delay, speakTime );
  459. NoteSpeaking( speakTime, delay );
  460. return true;
  461. }
  462. return false;
  463. }
//-------------------------------------
// Play a sentence (or randomly pick from a sentence group) through the
// engine's sentence system. Returns the sentence index, or -1 on failure.
int CAI_Expresser::SpeakRawSentence( const char *pszSentence, float delay, float volume, soundlevel_t soundlevel, CBaseEntity *pListener )
{
	int sentenceIndex = -1;

	if ( !pszSentence )
		return sentenceIndex;

	// Leading AI_SP_SPECIFIC_SENTENCE char means "play exactly this sentence".
	if ( pszSentence[0] == AI_SP_SPECIFIC_SENTENCE )
	{
		sentenceIndex = SENTENCEG_Lookup( pszSentence );

		if( sentenceIndex == -1 )
		{
			// sentence not found
			return -1;
		}

		CPASAttenuationFilter filter( GetOuter(), soundlevel );
		CBaseEntity::EmitSentenceByIndex( filter, GetOuter()->entindex(), CHAN_VOICE, sentenceIndex, volume, soundlevel, 0, GetVoicePitch());
	}
	else
	{
		// Otherwise pick a random sentence out of the named group.
		sentenceIndex = SENTENCEG_PlayRndSz( GetOuter()->NetworkProp()->edict(), pszSentence, volume, soundlevel, 0, GetVoicePitch() );
	}

	// NOTE(review): if the random-group path fails, sentenceIndex can still be
	// -1 here and engine->SentenceLength(-1) gets called — confirm the engine
	// tolerates that before changing this.
	SpeechMsg( GetOuter(), "SpeakRawSentence( %s, %f) %f\n", pszSentence, delay, engine->SentenceLength( sentenceIndex ) );
	NoteSpeaking( engine->SentenceLength( sentenceIndex ), delay );

	return sentenceIndex;
}
//-------------------------------------
// Prevent any new speech until the given absolute (server) time.
void CAI_Expresser::BlockSpeechUntil( float time )
{
	SpeechMsg( GetOuter(), "BlockSpeechUntil(%f) %f\n", time, time - gpGlobals->curtime );
	m_flBlockedTalkTime = time;
}
//-------------------------------------
// Record that the outer is speaking for 'duration' seconds starting after
// 'delay' seconds: updates stop times and acquires the talk semaphore.
void CAI_Expresser::NoteSpeaking( float duration, float delay )
{
	duration += delay;

	GetSink()->OnStartSpeaking();

	if ( duration <= 0 )
	{
		// no duration :(
		// Fall back to an arbitrary 3-second talk window; note the semaphore
		// below is then acquired with a duration of 0.
		m_flStopTalkTime = gpGlobals->curtime + 3;
		duration = 0;
	}
	else
	{
		m_flStopTalkTime = gpGlobals->curtime + duration;
	}

	// Stop time excluding the post-speech delay (used by CanSpeakAfterMyself).
	m_flStopTalkTimeWithoutDelay = m_flStopTalkTime - delay;

	SpeechMsg( GetOuter(), "NoteSpeaking( %f, %f ) (stop at %f)\n", duration, delay, m_flStopTalkTime );

	if ( GetSink()->UseSemaphore() )
	{
		CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( GetOuter() );
		if ( pSemaphore )
		{
			pSemaphore->Acquire( duration, GetOuter() );
		}
	}
}
  521. //-------------------------------------
  522. void CAI_Expresser::ForceNotSpeaking( void )
  523. {
  524. if ( IsSpeaking() )
  525. {
  526. m_flStopTalkTime = gpGlobals->curtime;
  527. m_flStopTalkTimeWithoutDelay = gpGlobals->curtime;
  528. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( GetOuter() );
  529. if ( pSemaphore )
  530. {
  531. if ( pSemaphore->GetOwner() == GetOuter() )
  532. {
  533. pSemaphore->Release();
  534. }
  535. }
  536. }
  537. }
  538. //-------------------------------------
  539. bool CAI_Expresser::IsSpeaking( void )
  540. {
  541. if ( m_flStopTalkTime > gpGlobals->curtime )
  542. SpeechMsg( GetOuter(), "IsSpeaking() %f\n", m_flStopTalkTime - gpGlobals->curtime );
  543. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  544. return true;
  545. return ( m_flStopTalkTime > gpGlobals->curtime );
  546. }
  547. //-------------------------------------
  548. bool CAI_Expresser::CanSpeak()
  549. {
  550. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  551. return false;
  552. float timeOk = MAX( m_flStopTalkTime, m_flBlockedTalkTime );
  553. return ( timeOk <= gpGlobals->curtime );
  554. }
  555. //-----------------------------------------------------------------------------
  556. // Purpose: Returns true if it's ok for this entity to speak after himself.
  557. // The base CanSpeak() includes the default speech delay, and won't
  558. // return true until that delay time has passed after finishing the
  559. // speech. This returns true as soon as the speech finishes.
  560. //-----------------------------------------------------------------------------
  561. bool CAI_Expresser::CanSpeakAfterMyself()
  562. {
  563. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  564. return false;
  565. float timeOk = MAX( m_flStopTalkTimeWithoutDelay, m_flBlockedTalkTime );
  566. return ( timeOk <= gpGlobals->curtime );
  567. }
  568. //-------------------------------------
  569. bool CAI_Expresser::CanSpeakConcept( AIConcept_t concept )
  570. {
  571. // Not in history?
  572. int iter = m_ConceptHistories.Find( concept );
  573. if ( iter == m_ConceptHistories.InvalidIndex() )
  574. {
  575. return true;
  576. }
  577. ConceptHistory_t *history = &m_ConceptHistories[iter];
  578. Assert( history );
  579. AI_Response *response = history->response;
  580. if ( !response )
  581. return true;
  582. if ( response->GetSpeakOnce() )
  583. return false;
  584. float respeakDelay = response->GetRespeakDelay();
  585. if ( respeakDelay != 0.0f )
  586. {
  587. if ( history->timeSpoken != -1 && ( gpGlobals->curtime < history->timeSpoken + respeakDelay ) )
  588. return false;
  589. }
  590. return true;
  591. }
  592. //-------------------------------------
  593. bool CAI_Expresser::SpokeConcept( AIConcept_t concept )
  594. {
  595. return GetTimeSpokeConcept( concept ) != -1.f;
  596. }
  597. //-------------------------------------
  598. float CAI_Expresser::GetTimeSpokeConcept( AIConcept_t concept )
  599. {
  600. int iter = m_ConceptHistories.Find( concept );
  601. if ( iter == m_ConceptHistories.InvalidIndex() )
  602. return -1;
  603. ConceptHistory_t *h = &m_ConceptHistories[iter];
  604. return h->timeSpoken;
  605. }
  606. //-------------------------------------
  607. void CAI_Expresser::SetSpokeConcept( AIConcept_t concept, AI_Response *response, bool bCallback )
  608. {
  609. int idx = m_ConceptHistories.Find( concept );
  610. if ( idx == m_ConceptHistories.InvalidIndex() )
  611. {
  612. ConceptHistory_t h;
  613. h.timeSpoken = gpGlobals->curtime;
  614. idx = m_ConceptHistories.Insert( concept, h );
  615. }
  616. ConceptHistory_t *slot = &m_ConceptHistories[ idx ];
  617. slot->timeSpoken = gpGlobals->curtime;
  618. // Update response info
  619. if ( response )
  620. {
  621. delete slot->response;
  622. slot->response = new AI_Response( *response );
  623. }
  624. if ( bCallback )
  625. GetSink()->OnSpokeConcept( concept, response );
  626. }
//-------------------------------------
// Forget that 'concept' was ever spoken (removes its history entry, freeing
// any retained response via ConceptHistory_t's destructor).
void CAI_Expresser::ClearSpokeConcept( AIConcept_t concept )
{
	m_ConceptHistories.Remove( concept );
}
  632. //-------------------------------------
  633. void CAI_Expresser::DumpHistories()
  634. {
  635. int c = 1;
  636. for ( int i = m_ConceptHistories.First(); i != m_ConceptHistories.InvalidIndex(); i = m_ConceptHistories.Next(i ) )
  637. {
  638. ConceptHistory_t *h = &m_ConceptHistories[ i ];
  639. DevMsg( "%i: %s at %f\n", c++, m_ConceptHistories.GetElementName( i ), h->timeSpoken );
  640. }
  641. }
  642. //-------------------------------------
  643. bool CAI_Expresser::IsValidResponse( ResponseType_t type, const char *pszValue )
  644. {
  645. if ( type == RESPONSE_SCENE )
  646. {
  647. char szInstanceFilename[256];
  648. GetOuter()->GenderExpandString( pszValue, szInstanceFilename, sizeof( szInstanceFilename ) );
  649. return ( GetSceneDuration( szInstanceFilename ) > 0 );
  650. }
  651. return true;
  652. }
  653. //-----------------------------------------------------------------------------
  654. // Purpose:
  655. //-----------------------------------------------------------------------------
  656. CAI_TimedSemaphore *CAI_Expresser::GetMySpeechSemaphore( CBaseEntity *pNpc )
  657. {
  658. if ( !pNpc->MyNPCPointer() )
  659. return NULL;
  660. return (pNpc->MyNPCPointer()->IsPlayerAlly() ? &g_AIFriendliesTalkSemaphore : &g_AIFoesTalkSemaphore );
  661. }
//-----------------------------------------------------------------------------
// Purpose: printf-style speech debug output, gated by DebuggingSpeech()
//          (the ai_debug_speech ConVar). Also mirrors to the game log.
//-----------------------------------------------------------------------------
void CAI_Expresser::SpeechMsg( CBaseEntity *pFlex, const char *pszFormat, ... )
{
	if ( !DebuggingSpeech() )
		return;

	char string[ 2048 ];
	va_list argptr;
	va_start( argptr, pszFormat );
	Q_vsnprintf( string, sizeof(string), pszFormat, argptr );
	va_end( argptr );

	if ( pFlex->MyNPCPointer() )
	{
		// NOTE(review): relies on a DevMsg overload that takes a CAI_BaseNPC*
		// first argument (per-NPC filtered spew) — confirm it exists before
		// touching this call.
		DevMsg( pFlex->MyNPCPointer(), "%s", string );
	}
	else
	{
		DevMsg( "%s", string );
	}
	UTIL_LogPrintf( "%s", string );
}
//-----------------------------------------------------------------------------
// Append the standard NPC response criteria (activity, state, enemy, speed,
// weapon, player distance/visibility) shared by CAI_ExpresserHost NPCs.
//-----------------------------------------------------------------------------
void CAI_ExpresserHost_NPC_DoModifyOrAppendCriteria( CAI_BaseNPC *pSpeaker, AI_CriteriaSet& set )
{
	// Append current activity name
	const char *pActivityName = pSpeaker->GetActivityName( pSpeaker->GetActivity() );
	if ( pActivityName )
	{
		set.AppendCriteria( "activity", pActivityName );
	}

	// Append the current NPC state as "[NPCState::<name>]".
	static const char *pStateNames[] = { "None", "Idle", "Alert", "Combat", "Scripted", "PlayDead", "Dead" };
	if ( (int)pSpeaker->m_NPCState < ARRAYSIZE(pStateNames) )
	{
		set.AppendCriteria( "npcstate", UTIL_VarArgs( "[NPCState::%s]", pStateNames[pSpeaker->m_NPCState] ) );
	}

	// Enemy criteria: timesincecombat is -1 while actively engaged, a huge
	// value if we've never fought, else the elapsed time since last enemy.
	if ( pSpeaker->GetEnemy() )
	{
		set.AppendCriteria( "enemy", pSpeaker->GetEnemy()->GetClassname() );
		set.AppendCriteria( "timesincecombat", "-1" );
	}
	else
	{
		if ( pSpeaker->GetLastEnemyTime() == 0.0 )
			set.AppendCriteria( "timesincecombat", "999999.0" );
		else
			set.AppendCriteria( "timesincecombat", UTIL_VarArgs( "%f", gpGlobals->curtime - pSpeaker->GetLastEnemyTime() ) );
	}

	set.AppendCriteria( "speed", UTIL_VarArgs( "%.3f", pSpeaker->GetSmoothedVelocity().Length() ) );

	CBaseCombatWeapon *weapon = pSpeaker->GetActiveWeapon();
	if ( weapon )
	{
		set.AppendCriteria( "weapon", weapon->GetClassname() );
	}
	else
	{
		set.AppendCriteria( "weapon", "none" );
	}

	// Distance to the (single) player, or MAX_COORD_RANGE when there is none.
	CBasePlayer *pPlayer = AI_GetSinglePlayer();
	if ( pPlayer )
	{
		Vector distance = pPlayer->GetAbsOrigin() - pSpeaker->GetAbsOrigin();
		set.AppendCriteria( "distancetoplayer", UTIL_VarArgs( "%f", distance.Length() ) );
	}
	else
	{
		set.AppendCriteria( "distancetoplayer", UTIL_VarArgs( "%i", MAX_COORD_RANGE ) );
	}

	// Mutual visibility criteria.
	if ( pSpeaker->HasCondition( COND_SEE_PLAYER ) )
	{
		set.AppendCriteria( "seeplayer", "1" );
	}
	else
	{
		set.AppendCriteria( "seeplayer", "0" );
	}

	if ( pPlayer && pPlayer->FInViewCone( pSpeaker ) && pPlayer->FVisible( pSpeaker ) )
	{
		set.AppendCriteria( "seenbyplayer", "1" );
	}
	else
	{
		set.AppendCriteria( "seenbyplayer", "0" );
	}
}
//-----------------------------------------------------------------------------
//=============================================================================
// HPE_BEGIN:
// [Forrest] Remove npc_speakall from Counter-Strike.
//=============================================================================
#ifndef CSTRIKE_DLL
extern CBaseEntity *FindPickerEntity( CBasePlayer *pPlayer );

// Console command (server admin only): make the named — or pointed-at — NPC
// play every response it knows, via CAI_Expresser::TestAllResponses.
CON_COMMAND( npc_speakall, "Force the npc to try and speak all their responses" )
{
	if ( !UTIL_IsCommandIssuedByServerAdmin() )
		return;

	CBaseEntity *pEntity;

	if ( args[1] && *args[1] )
	{
		// Look up by targetname first, then fall back to classname.
		pEntity = gEntList.FindEntityByName( NULL, args[1], NULL );
		if ( !pEntity )
		{
			pEntity = gEntList.FindEntityByClassname( NULL, args[1] );
		}
	}
	else
	{
		// No argument: use whatever entity the issuing client is looking at.
		pEntity = FindPickerEntity( UTIL_GetCommandClient() );
	}

	if ( pEntity )
	{
		CAI_BaseNPC *pNPC = pEntity->MyNPCPointer();
		if (pNPC)
		{
			if ( pNPC->GetExpresser() )
			{
				// Temporarily unlock the network string tables around the test
				// (presumably so new sounds/scenes can register — confirm),
				// then restore the previous lock state.
				bool save = engine->LockNetworkStringTables( false );
				pNPC->GetExpresser()->TestAllResponses();
				engine->LockNetworkStringTables( save );
			}
		}
	}
}
#endif
  786. //=============================================================================
  787. // HPE_END
  788. //=============================================================================
  789. //-----------------------------------------------------------------------------
  790. CMultiplayer_Expresser::CMultiplayer_Expresser( CBaseFlex *pOuter ) : CAI_Expresser( pOuter )
  791. {
  792. m_bAllowMultipleScenes = false;
  793. }
  794. bool CMultiplayer_Expresser::IsSpeaking( void )
  795. {
  796. if ( m_bAllowMultipleScenes )
  797. {
  798. return false;
  799. }
  800. return CAI_Expresser::IsSpeaking();
  801. }
// Permit this talker to run several choreo scenes at once
// (IsSpeaking then always reports false).
void CMultiplayer_Expresser::AllowMultipleScenes()
{
	m_bAllowMultipleScenes = true;
}
// Restore the default single-scene behavior.
void CMultiplayer_Expresser::DisallowMultipleScenes()
{
	m_bAllowMultipleScenes = false;
}