Counter-Strike: Global Offensive Source Code

  1. //========= Copyright 1996-2005, Valve Corporation, All rights reserved. ============//
  2. //
  3. // Purpose:
  4. //
  5. // $NoKeywords: $
  6. //=============================================================================//
  7. #include "cbase.h"
  8. #include "ai_speech.h"
  9. #include "game.h"
  10. #include "engine/IEngineSound.h"
  11. #include "keyvalues.h"
  12. #include "ai_basenpc.h"
  13. #include "ai_criteria.h"
  14. #include "isaverestore.h"
  15. #include "sceneentity.h"
  16. #include "ai_speechqueue.h"
  17. #include "cs_gamerules.h"
  18. // memdbgon must be the last include file in a .cpp file!!!
  19. #include <tier0/memdbgon.h>
  20. #define DEBUG_AISPEECH 1
  21. #ifdef DEBUG_AISPEECH
  22. ConVar ai_debug_speech( "ai_debug_speech", "0" );
  23. #define DebuggingSpeech() ai_debug_speech.GetBool()
  24. #else
  25. inline void SpeechMsg( ... ) {}
  26. #define DebuggingSpeech() (false)
  27. #endif
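// SpeechMsg debug output below is gated by the ai_debug_speech ConVar above;
// set it to a non-zero value in the developer console to enable the logging.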
  28. #ifdef _PS3
  29. #define strtok_s strtok_r
  30. #endif
  31. extern ConVar rr_debugresponses;
  32. //-----------------------------------------------------------------------------
  33. CAI_TimedSemaphore g_AIFriendliesTalkSemaphore;
  34. CAI_TimedSemaphore g_AIFoesTalkSemaphore;
  35. ConceptHistory_t::~ConceptHistory_t()
  36. {
  37. }
  38. ConceptHistory_t::ConceptHistory_t( const ConceptHistory_t& src )
  39. {
  40. timeSpoken = src.timeSpoken;
  41. m_response = src.m_response ;
  42. }
  43. ConceptHistory_t& ConceptHistory_t::operator =( const ConceptHistory_t& src )
  44. {
  45. if ( this == &src )
  46. return *this;
  47. timeSpoken = src.timeSpoken;
  48. m_response = src.m_response ;
  49. return *this;
  50. }
  51. BEGIN_SIMPLE_DATADESC( ConceptHistory_t )
  52. DEFINE_FIELD( timeSpoken, FIELD_TIME ), // Relative to server time
  53. // DEFINE_EMBEDDED( response, FIELD_??? ), // This is manually saved/restored by the ConceptHistory saverestore ops below
  54. END_DATADESC()
  55. class CConceptHistoriesDataOps : public CDefSaveRestoreOps
  56. {
  57. public:
  58. virtual void Save( const SaveRestoreFieldInfo_t &fieldInfo, ISave *pSave )
  59. {
  60. CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
  61. int count = ch->Count();
  62. pSave->WriteInt( &count );
  63. for ( int i = 0 ; i < count; i++ )
  64. {
  65. ConceptHistory_t *pHistory = &(*ch)[ i ];
  66. pSave->StartBlock();
  67. {
  68. // Write element name
  69. pSave->WriteString( ch->GetElementName( i ) );
  70. // Write data
  71. pSave->WriteAll( pHistory );
  72. // Write response blob
  73. bool hasresponse = !pHistory->m_response.IsEmpty() ;
  74. pSave->WriteBool( &hasresponse );
  75. if ( hasresponse )
  76. {
  77. pSave->WriteAll( &pHistory->m_response );
  78. }
  79. // TODO: Could blat out pHistory->criteria pointer here, if it's needed
  80. }
  81. pSave->EndBlock();
  82. }
  83. }
  84. virtual void Restore( const SaveRestoreFieldInfo_t &fieldInfo, IRestore *pRestore )
  85. {
  86. CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
  87. int count = pRestore->ReadInt();
  88. Assert( count >= 0 );
  89. for ( int i = 0 ; i < count; i++ )
  90. {
  91. char conceptname[ 512 ];
  92. conceptname[ 0 ] = 0;
  93. ConceptHistory_t history;
  94. pRestore->StartBlock();
  95. {
  96. pRestore->ReadString( conceptname, sizeof( conceptname ), 0 );
  97. pRestore->ReadAll( &history );
  98. bool hasresponse = false;
  99. pRestore->ReadBool( &hasresponse );
  100. if ( hasresponse )
  101. {
  103. pRestore->ReadAll( &history.m_response );
  104. }
  105. else
  106. {
  107. history.m_response.Invalidate();
  108. }
  109. }
  110. pRestore->EndBlock();
  111. // TODO: Could restore pHistory->criteria pointer here, if it's needed
  112. // Add to utldict
  113. if ( conceptname[0] != 0 )
  114. {
  115. ch->Insert( conceptname, history );
  116. }
  117. else
  118. {
  119. Assert( !"Error restoring ConceptHistory_t, discarding!" );
  120. }
  121. }
  122. }
  123. virtual void MakeEmpty( const SaveRestoreFieldInfo_t &fieldInfo )
  124. {
  125. }
  126. virtual bool IsEmpty( const SaveRestoreFieldInfo_t &fieldInfo )
  127. {
  128. CUtlDict< ConceptHistory_t, int > *ch = ((CUtlDict< ConceptHistory_t, int > *)fieldInfo.pField);
  129. return ch->Count() == 0 ? true : false;
  130. }
  131. };
  132. CConceptHistoriesDataOps g_ConceptHistoriesSaveDataOps;
  133. /////////////////////////////////////////////////
  134. // context operators
  135. RR::CApplyContextOperator RR::sm_OpCopy(0); // "" (default: plain copy)
  136. RR::CIncrementOperator RR::sm_OpIncrement(2); // "++"
  137. RR::CDecrementOperator RR::sm_OpDecrement(2); // "--"
  138. RR::CToggleOperator RR::sm_OpToggle(1); // "!"
  139. RR::CApplyContextOperator *RR::CApplyContextOperator::FindOperator( const char *pContextString )
  140. {
  141. if ( !pContextString || pContextString[0] == 0 )
  142. {
  143. return &sm_OpCopy;
  144. }
  145. if ( pContextString[0] == '+' && pContextString [1] == '+' && pContextString[2] != '\0' )
  146. {
  147. return &sm_OpIncrement;
  148. }
  149. else if ( pContextString[0] == '-' && pContextString [1] == '-' && pContextString[2] != '\0' )
  150. {
  151. return &sm_OpDecrement;
  152. }
  153. else if ( pContextString[0] == '!' )
  154. {
  155. return &sm_OpToggle;
  156. }
  157. else
  158. {
  159. return &sm_OpCopy;
  160. }
  161. }
  162. // default is just copy
  163. bool RR::CApplyContextOperator::Apply( const char *pOldValue, const char *pOperator, char *pNewValue, int pNewValBufSize )
  164. {
  165. Assert( pOperator && pNewValue && pNewValBufSize > 0 );
  166. Assert( m_nSkipChars == 0 );
  167. if ( pOperator )
  168. {
  169. V_strncpy( pNewValue, pOperator, pNewValBufSize );
  170. }
  171. else
  172. {
  173. *pNewValue = 0;
  174. }
  175. return true;
  176. }
  177. bool RR::CIncrementOperator::Apply( const char *pOldValue, const char *pOperator, char *pNewValue, int pNewValBufSize )
  178. {
  179. Assert( pOperator[0] == '+' && pOperator[1] == '+' );
  180. // parse out the old value as a numeric
  181. int nOld = pOldValue ? V_atoi(pOldValue) : 0;
  182. int nInc = V_atoi( pOperator+m_nSkipChars );
  183. V_snprintf( pNewValue, pNewValBufSize, "%d", nOld+nInc );
  184. return true;
  185. }
  186. bool RR::CDecrementOperator::Apply( const char *pOldValue, const char *pOperator, char *pNewValue, int pNewValBufSize )
  187. {
  188. Assert( pOperator[0] == '-' && pOperator[1] == '-' );
  189. // parse out the old value as a numeric
  190. int nOld = pOldValue ? V_atoi(pOldValue) : 0;
  191. int nInc = V_atoi( pOperator+m_nSkipChars );
  192. V_snprintf( pNewValue, pNewValBufSize, "%d", nOld-nInc );
  193. return true;
  194. }
  195. bool RR::CToggleOperator::Apply( const char *pOldValue, const char *pOperator, char *pNewValue, int pNewValBufSize )
  196. {
  197. Assert( pOperator[0] == '!' );
  198. // parse out the old value as a numeric
  199. int nOld = pOldValue ? V_atoi(pOldValue) : 0;
  200. V_snprintf( pNewValue, pNewValBufSize, "%d", nOld ? 0 : 1 );
  201. return true;
  202. }
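// Worked examples for the context operators above:
//   FindOperator( "++3" ) yields sm_OpIncrement; applied to an old value of "2" it writes "5".
//   FindOperator( "--1" ) yields sm_OpDecrement; applied to "2" it writes "1".
//   FindOperator( "!" )   yields sm_OpToggle;    applied to "0" it writes "1".
//   Any other string yields sm_OpCopy, which copies the operator string verbatim.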
  203. //-----------------------------------------------------------------------------
  204. //
  205. // CLASS: CAI_Expresser
  206. //
  207. BEGIN_SIMPLE_DATADESC( CAI_Expresser )
  208. // m_pSink (reconnected on load)
  209. // DEFINE_FIELD( m_pOuter, CHandle < CBaseFlex > ),
  210. DEFINE_CUSTOM_FIELD( m_ConceptHistories, &g_ConceptHistoriesSaveDataOps ),
  211. DEFINE_FIELD( m_flStopTalkTime, FIELD_TIME ),
  212. DEFINE_FIELD( m_flStopTalkTimeWithoutDelay, FIELD_TIME ),
  213. DEFINE_FIELD( m_flBlockedTalkTime, FIELD_TIME ),
  214. DEFINE_FIELD( m_voicePitch, FIELD_INTEGER ),
  215. DEFINE_FIELD( m_flLastTimeAcceptedSpeak, FIELD_TIME ),
  216. END_DATADESC()
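// Typical call flow: Speak() gathers criteria via GatherCriteria(), asks the response
// system for a match in FindResponse(), and then plays the chosen result (scene, sound,
// sentence, debug print or entity I/O) through SpeakDispatchResponse().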
  217. //-------------------------------------
  218. bool CAI_Expresser::SemaphoreIsAvailable( CBaseEntity *pTalker )
  219. {
  220. if ( !GetSink()->UseSemaphore() )
  221. return true;
  222. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( pTalker->MyNPCPointer() );
  223. return (pSemaphore ? pSemaphore->IsAvailable( pTalker ) : true);
  224. }
  225. //-------------------------------------
  226. float CAI_Expresser::GetSemaphoreAvailableTime( CBaseEntity *pTalker )
  227. {
  228. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( pTalker->MyNPCPointer() );
  229. return (pSemaphore ? pSemaphore->GetReleaseTime() : 0);
  230. }
  231. //-------------------------------------
  232. int CAI_Expresser::GetVoicePitch() const
  233. {
  234. return m_voicePitch + random->RandomInt(0,3);
  235. }
  236. #ifdef DEBUG
  237. static int g_nExpressers;
  238. #endif
  239. /*
  240. inline bool ShouldBeInExpresserQueue( CBaseFlex *pOuter )
  241. {
  242. return true; // return IsTerrorPlayer( pOuter, TEAM_SURVIVOR );
  243. }
  244. */
  245. CAI_Expresser::CAI_Expresser( CBaseFlex *pOuter )
  246. : m_pOuter( pOuter ),
  247. m_pSink( NULL ),
  248. m_flStopTalkTime( 0 ),
  249. m_flBlockedTalkTime( 0 ),
  250. m_flStopTalkTimeWithoutDelay( 0 ),
  251. m_voicePitch( 100 ),
  252. m_flLastTimeAcceptedSpeak( 0 )
  253. {
  254. #ifdef DEBUG
  255. g_nExpressers++;
  256. #endif
  257. if (m_pOuter)
  258. {
  259. // register me with the global expresser queue.
  260. #ifndef TERROR
  261. // L4D: something a little ass backwards is happening here. We only want
  262. // survivors to be in the queue. However, the team number isn't
  263. // specified yet. So, we actually need to do this in the player's ChangeTeam.
  264. g_ResponseQueueManager.GetQueue()->AddExpresserHost(m_pOuter);
  265. #endif
  266. }
  267. }
  268. CAI_Expresser::~CAI_Expresser()
  269. {
  270. m_ConceptHistories.Purge();
  271. CBaseFlex *RESTRICT outer = GetOuter();
  272. if ( outer )
  273. {
  274. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( outer );
  275. if ( pSemaphore )
  276. {
  277. if ( pSemaphore->GetOwner() == outer )
  278. pSemaphore->Release();
  279. #ifdef DEBUG
  280. g_nExpressers--;
  281. if ( g_nExpressers == 0 && pSemaphore->GetOwner() )
  282. DevMsg( 2, "Speech semaphore being held by non-talker entity\n" );
  283. #endif
  284. }
  285. g_ResponseQueueManager.GetQueue()->RemoveExpresserHost(outer);
  286. }
  287. }
  288. //-----------------------------------------------------------------------------
  289. void CAI_Expresser::TestAllResponses()
  290. {
  291. IResponseSystem *pResponseSystem = GetOuter()->GetResponseSystem();
  292. if ( pResponseSystem )
  293. {
  294. CUtlVector<AI_Response> responses;
  295. pResponseSystem->GetAllResponses( &responses );
  296. for ( int i = 0; i < responses.Count(); i++ )
  297. {
  298. char response[ 256 ];
  299. responses[i].GetResponse( response, sizeof( response ) );
  300. Msg( "Response: %s\n", response );
  301. AIConcept_t concept;
  302. SpeakDispatchResponse( concept, &responses[i], NULL );
  303. }
  304. }
  305. }
  306. //-----------------------------------------------------------------------------
  307. void CAI_Expresser::SetOuter( CBaseFlex *pOuter )
  308. {
  309. // if we're changing outers (which is a strange thing to do)
  310. // unregister the old one from the queue.
  311. if ( m_pOuter && ( m_pOuter != pOuter ) )
  312. {
  313. AssertMsg2( false, "Expresser is switching its Outer from %s to %s. Why?", m_pOuter->GetDebugName(), pOuter->GetDebugName() );
  314. // unregister me with the global expresser queue
  315. g_ResponseQueueManager.GetQueue()->RemoveExpresserHost(m_pOuter);
  316. }
  317. m_pOuter = pOuter;
  318. }
  319. //-----------------------------------------------------------------------------
  320. static const int LEN_SPECIFIC_SCENE_MODIFIER = strlen( AI_SPECIFIC_SCENE_MODIFIER );
  321. // This function appends "Global world" criteria that are always added to
  322. // any character doing any match. This represents global concepts like weather, who's
  323. // alive, etc.
  324. static void ModifyOrAppendGlobalCriteria( AI_CriteriaSet * RESTRICT outputSet )
  325. {
  326. // Add the round duration and current round time
  327. if ( CSGameRules() )
  328. {
  329. outputSet->AppendCriteria( "RoundLength", CSGameRules()->GetRoundLength() );
  330. outputSet->AppendCriteria( "RoundElapsedTime", CSGameRules()->GetRoundElapsedTime() );
  331. }
  332. }
  333. void CAI_Expresser::GatherCriteria( AI_CriteriaSet * RESTRICT outputSet, const AIConcept_t &concept, const char * RESTRICT modifiers )
  334. {
  335. // Always include the concept name
  336. outputSet->AppendCriteria( "concept", concept, CONCEPT_WEIGHT );
  337. #if 1
  338. outputSet->Merge( modifiers );
  339. #else
  340. // Always include any optional modifiers
  341. if ( modifiers != NULL )
  342. {
  343. char copy_modifiers[ 255 ];
  344. const char *pCopy;
  345. char key[ 128 ] = { 0 };
  346. char value[ 128 ] = { 0 };
  347. Q_strncpy( copy_modifiers, modifiers, sizeof( copy_modifiers ) );
  348. pCopy = copy_modifiers;
  349. while( pCopy )
  350. {
  351. pCopy = SplitContext( pCopy, key, sizeof( key ), value, sizeof( value ), NULL, modifiers );
  352. if( *key && *value )
  353. {
  354. outputSet->AppendCriteria( key, value, CONCEPT_WEIGHT );
  355. }
  356. }
  357. }
  358. #endif
  359. // include any global criteria
  360. ModifyOrAppendGlobalCriteria( outputSet );
  361. // Let our outer fill in most match criteria
  362. GetOuter()->ModifyOrAppendCriteria( *outputSet );
  363. // Append local player criteria to set, but not if this is a player doing the talking
  364. if ( !GetOuter()->IsPlayer() )
  365. {
  366. CBasePlayer *pPlayer = UTIL_PlayerByIndex( 1 );
  367. if( pPlayer )
  368. pPlayer->ModifyOrAppendPlayerCriteria( *outputSet );
  369. }
  370. }
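// The gathered set therefore contains the concept itself, any caller-supplied modifiers,
// the global world criteria appended by ModifyOrAppendGlobalCriteria, the outer entity's
// own criteria, and (for non-players) the first player's criteria.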
  371. //-----------------------------------------------------------------------------
  372. // Purpose: Searches for a possible response
  373. // Input : concept -
  374. // NULL -
  375. // Output : bool - true if a matching response was found
  376. //-----------------------------------------------------------------------------
  377. // AI_Response *CAI_Expresser::SpeakFindResponse( AIConcept_t concept, const char *modifiers /*= NULL*/ )
  378. bool CAI_Expresser::FindResponse( AI_Response &outResponse, AIConcept_t &concept, AI_CriteriaSet *criteria )
  379. {
  380. VPROF("CAI_Expresser::FindResponse");
  381. IResponseSystem *rs = GetOuter()->GetResponseSystem();
  382. if ( !rs )
  383. {
  384. Assert( !"No response system installed for CAI_Expresser::GetOuter()!!!" );
  385. return false;
  386. }
  387. // if I'm dead, I can't possibly match dialog.
  388. if ( !GetOuter()->IsAlive() )
  389. {
  390. return false;
  391. }
  392. #if 0 // this is the old technique, where we always gathered criteria in this function
  393. AI_CriteriaSet set;
  394. // Always include the concept name
  395. set.AppendCriteria( "concept", concept, CONCEPT_WEIGHT );
  396. // Always include any optional modifiers
  397. if ( modifiers != NULL )
  398. {
  399. char copy_modifiers[ 255 ];
  400. const char *pCopy;
  401. char key[ 128 ] = { 0 };
  402. char value[ 128 ] = { 0 };
  403. Q_strncpy( copy_modifiers, modifiers, sizeof( copy_modifiers ) );
  404. pCopy = copy_modifiers;
  405. while( pCopy )
  406. {
  407. pCopy = SplitContext( pCopy, key, sizeof( key ), value, sizeof( value ), NULL, modifiers );
  408. if( *key && *value )
  409. {
  410. set.AppendCriteria( key, value, CONCEPT_WEIGHT );
  411. }
  412. }
  413. }
  414. // Let our outer fill in most match criteria
  415. GetOuter()->ModifyOrAppendCriteria( set );
  416. // Append local player criteria to set, but not if this is a player doing the talking
  417. if ( !GetOuter()->IsPlayer() )
  418. {
  419. CBasePlayer *pPlayer = UTIL_PlayerByIndex( 1 );
  420. if( pPlayer )
  421. pPlayer->ModifyOrAppendPlayerCriteria( set );
  422. }
  423. #else
  424. AI_CriteriaSet localCriteriaSet; // put it on the stack so we don't deal with new/delete
  425. if (criteria == NULL)
  426. {
  427. GatherCriteria( &localCriteriaSet, concept, NULL );
  428. criteria = &localCriteriaSet;
  429. }
  430. #endif
  431. /// intercept any deferred criteria that are being sent to world
  432. AI_CriteriaSet worldWritebackCriteria;
  433. AI_CriteriaSet::InterceptWorldSetContexts( criteria, &worldWritebackCriteria );
  434. // Now that we have a criteria set, ask for a suitable response
  435. bool found = rs->FindBestResponse( *criteria, outResponse, this );
  436. if ( rr_debugresponses.GetInt() == 4 )
  437. {
  438. if ( ( GetOuter()->MyNPCPointer() && GetOuter()->m_debugOverlays & OVERLAY_NPC_SELECTED_BIT ) || GetOuter()->IsPlayer() )
  439. {
  440. const char *pszName;
  441. if ( GetOuter()->IsPlayer() )
  442. {
  443. pszName = ((CBasePlayer*)GetOuter())->GetPlayerName();
  444. }
  445. else
  446. {
  447. pszName = GetOuter()->GetDebugName();
  448. }
  449. if ( found )
  450. {
  451. char response[ 256 ];
  452. outResponse.GetResponse( response, sizeof( response ) );
  453. Warning( "RESPONSERULES: %s spoke '%s'. Found response '%s'.\n", pszName, (const char*)concept, response );
  454. }
  455. else
  456. {
  457. Warning( "RESPONSERULES: %s spoke '%s'. Found no matching response.\n", pszName, (const char*)concept );
  458. }
  459. }
  460. }
  461. if ( !found )
  462. {
  463. return false;
  464. }
  465. else if ( worldWritebackCriteria.GetCount() > 0 )
  466. {
  467. Assert( CBaseEntity::Instance( INDEXENT( 0 ) )->IsWorld( ) );
  468. worldWritebackCriteria.WriteToEntity( CBaseEntity::Instance( INDEXENT( 0 ) ) );
  469. }
  470. if ( outResponse.IsEmpty() )
  471. {
  472. // AssertMsg2( false, "RR: %s got empty but valid response for %s", GetOuter()->GetDebugName(), concept.GetStringConcept() );
  473. return false;
  474. }
  475. else
  476. {
  477. return true;
  478. }
  479. }
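// Note: deferred criteria destined for the world are split off into worldWritebackCriteria
// above and, once a response is found, written back onto the world entity.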
  480. #if 0
  481. //-----------------------------------------------------------------------------
  482. // Purpose: Searches for a possible response; writes it into a response passed as
  483. // parameter rather than new'ing one up.
  484. // Input : concept -
  485. // NULL -
  486. // Output : bool : true on success, false on fail
  487. //-----------------------------------------------------------------------------
  488. AI_Response *CAI_Expresser::SpeakFindResponse( AI_Response *result, AIConcept_t &concept, AI_CriteriaSet *criteria )
  489. {
  490. Assert( result );
  491. IResponseSystem *rs = GetOuter()->GetResponseSystem();
  492. if ( !rs )
  493. {
  494. Assert( !"No response system installed for CAI_Expresser::GetOuter()!!!" );
  495. return NULL;
  496. }
  497. #if 0
  498. AI_CriteriaSet set;
  499. // Always include the concept name
  500. set.AppendCriteria( "concept", concept, CONCEPT_WEIGHT );
  501. // Always include any optional modifiers
  502. if ( modifiers != NULL )
  503. {
  504. char copy_modifiers[ 255 ];
  505. const char *pCopy;
  506. char key[ 128 ] = { 0 };
  507. char value[ 128 ] = { 0 };
  508. Q_strncpy( copy_modifiers, modifiers, sizeof( copy_modifiers ) );
  509. pCopy = copy_modifiers;
  510. while( pCopy )
  511. {
  512. pCopy = SplitContext( pCopy, key, sizeof( key ), value, sizeof( value ), NULL, modifiers );
  513. if( *key && *value )
  514. {
  515. set.AppendCriteria( key, value, CONCEPT_WEIGHT );
  516. }
  517. }
  518. }
  519. // Let our outer fill in most match criteria
  520. GetOuter()->ModifyOrAppendCriteria( set );
  521. // Append local player criteria to set, but not if this is a player doing the talking
  522. if ( !GetOuter()->IsPlayer() )
  523. {
  524. CBasePlayer *pPlayer = UTIL_PlayerByIndex( 1 );
  525. if( pPlayer )
  526. pPlayer->ModifyOrAppendPlayerCriteria( set );
  527. }
  528. #else
  529. AI_CriteriaSet &set = *criteria;
  530. #endif
  531. // Now that we have a criteria set, ask for a suitable response
  532. bool found = rs->FindBestResponse( set, *result, this );
  533. if ( rr_debugresponses.GetInt() == 4 )
  534. {
  535. if ( ( GetOuter()->MyNPCPointer() && GetOuter()->m_debugOverlays & OVERLAY_NPC_SELECTED_BIT ) || GetOuter()->IsPlayer() )
  536. {
  537. const char *pszName;
  538. if ( GetOuter()->IsPlayer() )
  539. {
  540. pszName = ((CBasePlayer*)GetOuter())->GetPlayerName();
  541. }
  542. else
  543. {
  544. pszName = GetOuter()->GetDebugName();
  545. }
  546. if ( found )
  547. {
  548. char response[ 256 ];
  549. result->GetResponse( response, sizeof( response ) );
  550. Warning( "RESPONSERULES: %s spoke '%s'. Found response '%s'.\n", pszName, concept, response );
  551. }
  552. else
  553. {
  554. Warning( "RESPONSERULES: %s spoke '%s'. Found no matching response.\n", pszName, concept );
  555. }
  556. }
  557. }
  558. if ( !found )
  559. {
  560. //Assert( !"rs->FindBestResponse: Returned a NULL AI_Response!" );
  561. return false;
  562. }
  563. char response[ 256 ];
  564. result->GetResponse( response, sizeof( response ) );
  565. if ( !response[0] )
  566. {
  567. return false;
  568. }
  569. return true;
  570. }
  571. #endif
  572. //-----------------------------------------------------------------------------
  573. // Purpose: Dispatches the result
  574. // Input : *response -
  575. //-----------------------------------------------------------------------------
  576. bool CAI_Expresser::SpeakDispatchResponse( AIConcept_t &concept, AI_Response *result, AI_CriteriaSet *criteria, IRecipientFilter *filter /* = NULL */ )
  577. {
  578. char response[ 256 ];
  579. result->GetResponse( response, sizeof( response ) );
  580. float delay = result->GetDelay();
  581. bool spoke = false;
  582. soundlevel_t soundlevel = result->GetSoundLevel();
  583. if ( IsSpeaking() && concept[0] != 0 && result->GetType() != ResponseRules::RESPONSE_PRINT )
  584. {
  585. const char *entityName = STRING( GetOuter()->GetEntityName() );
  586. if ( GetOuter()->IsPlayer() )
  587. {
  588. entityName = ToBasePlayer( GetOuter() )->GetPlayerName();
  589. }
  590. DevMsg( 2, "SpeakDispatchResponse: Entity ( %i/%s ) already speaking, forcing '%s'\n", GetOuter()->entindex(), entityName ? entityName : "UNKNOWN", (const char*)concept );
  591. // Tracker 15911: Can break the game if we stop an imported map placed lcs here, so only
  592. // cancel actor out of instanced scripted scenes. ywb
  593. RemoveActorFromScriptedScenes( GetOuter(), true /*instanced scenes only*/ );
  594. GetOuter()->SentenceStop();
  595. if ( IsRunningScriptedScene( GetOuter() ) )
  596. {
  597. DevMsg( "SpeakDispatchResponse: Entity ( %i/%s ) refusing to speak due to scene entity, tossing '%s'\n", GetOuter()->entindex(), entityName ? entityName : "UNKNOWN", (const char*)concept );
  598. return false;
  599. }
  600. }
  601. switch ( result->GetType() )
  602. {
  603. default:
  604. case ResponseRules::RESPONSE_NONE:
  605. break;
  606. case ResponseRules::RESPONSE_SPEAK:
  607. {
  608. if ( !result->ShouldntUseScene() )
  609. {
  610. // This generates a fake CChoreoScene wrapping the sound.txt name
  611. spoke = SpeakAutoGeneratedScene( response, delay );
  612. }
  613. else
  614. {
  615. float speakTime = GetResponseDuration( result );
  616. GetOuter()->EmitSound( response );
  617. DevMsg( 2, "SpeakDispatchResponse: Entity ( %i/%s ) playing sound '%s'\n", GetOuter()->entindex(), STRING( GetOuter()->GetEntityName() ), response );
  618. NoteSpeaking( speakTime, delay );
  619. spoke = true;
  620. }
  621. }
  622. break;
  623. case ResponseRules::RESPONSE_SENTENCE:
  624. {
  625. spoke = ( -1 != SpeakRawSentence( response, delay, VOL_NORM, soundlevel ) ) ? true : false;
  626. }
  627. break;
  628. case ResponseRules::RESPONSE_SCENE:
  629. {
  630. spoke = SpeakRawScene( response, delay, result, filter );
  631. }
  632. break;
  633. case ResponseRules::RESPONSE_RESPONSE:
  634. {
  635. // This should have been recursively resolved already
  636. Assert( 0 );
  637. }
  638. break;
  639. case ResponseRules::RESPONSE_PRINT:
  640. {
  641. if ( g_pDeveloper->GetInt() > 0 )
  642. {
  643. Vector vPrintPos;
  644. GetOuter()->CollisionProp()->NormalizedToWorldSpace( Vector(0.5,0.5,1.0f), &vPrintPos );
  645. NDebugOverlay::Text( vPrintPos, response, true, 1.5 );
  646. }
  647. spoke = true;
  648. }
  649. break;
  650. case ResponseRules::RESPONSE_ENTITYIO:
  651. {
  652. return FireEntIOFromResponse( response, GetOuter() );
  653. }
  654. break;
  655. }
  656. if ( spoke )
  657. {
  658. m_flLastTimeAcceptedSpeak = gpGlobals->curtime;
  659. if ( DebuggingSpeech() && g_pDeveloper->GetInt() > 0 && response && result->GetType() != ResponseRules::RESPONSE_PRINT )
  660. {
  661. Vector vPrintPos;
  662. GetOuter()->CollisionProp()->NormalizedToWorldSpace( Vector(0.5,0.5,1.0f), &vPrintPos );
  663. NDebugOverlay::Text( vPrintPos, CFmtStr( "%s: %s", (const char*)concept, response ), true, 1.5 );
  664. }
  665. if ( result->IsApplyContextToWorld() )
  666. {
  667. CBaseEntity *pEntity = CBaseEntity::Instance( INDEXENT( 0 ) );
  668. if ( pEntity )
  669. {
  670. pEntity->AddContext( result->GetContext() );
  671. }
  672. }
  673. else
  674. {
  675. GetOuter()->AddContext( result->GetContext() );
  676. }
  677. SetSpokeConcept( concept, result );
  678. }
  679. else
  680. {
  681. }
  682. return spoke;
  683. }
  684. bool CAI_Expresser::FireEntIOFromResponse( char *response, CBaseEntity *pInitiator )
  685. {
  686. // find the space-separator in the response name, then split into entityname, input, and parameter
  687. // may barf in linux; there, should make some StringTokenizer() class that wraps the strtok_s behavior, etc.
  688. char *pszEntname;
  689. char *pszInput;
  690. char *pszParam;
  691. char *strtokContext = NULL;
  692. pszEntname = strtok_s( response, " ", &strtokContext );
  693. if ( !pszEntname )
  694. {
  695. Warning( "Response was entityio but had bad value %s\n", response );
  696. return false;
  697. }
  698. pszInput = strtok_s( NULL, " ", &strtokContext );
  699. if ( !pszInput )
  700. {
  701. Warning( "Response was entityio but had bad value %s\n", response );
  702. return false;
  703. }
  704. pszParam = strtok_s( NULL, " ", &strtokContext );
  705. // poke entity io
  706. CBaseEntity *pTarget = gEntList.FindEntityByName( NULL, pszEntname, pInitiator );
  707. if ( !pTarget )
  708. {
  709. Msg( "Response rule targeted %s with entityio, but that doesn't exist.\n", pszEntname );
  710. // but this is actually a legit use case, so return true (below).
  711. }
  712. else
  713. {
  714. // pump the action into the target
  715. variant_t variant;
  716. if ( pszParam )
  717. {
  718. variant.SetString( MAKE_STRING(pszParam) );
  719. }
  720. pTarget->AcceptInput( pszInput, pInitiator, pInitiator, variant, 0 );
  721. }
  722. return true;
  723. }
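// RESPONSE_ENTITYIO response strings are parsed above as "<targetname> <input> [parameter]";
// e.g. a hypothetical value "door01 Open" would fire the Open input on the entity named door01.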
  724. //-----------------------------------------------------------------------------
  725. // Purpose:
  726. // Input : *response -
  727. // Output : float
  728. //-----------------------------------------------------------------------------
  729. float CAI_Expresser::GetResponseDuration( AI_Response *result )
  730. {
  731. Assert( result );
  732. char response[ 256 ];
  733. result->GetResponse( response, sizeof( response ) );
  734. switch ( result->GetType() )
  735. {
  736. case ResponseRules::RESPONSE_SPEAK:
  737. {
  738. return GetOuter()->GetSoundDuration( response, STRING( GetOuter()->GetModelName() ) );
  739. }
  740. break;
  741. case ResponseRules::RESPONSE_SENTENCE:
  742. {
  743. Assert( 0 );
  744. return 999.0f;
  745. }
  746. break;
  747. case ResponseRules::RESPONSE_SCENE:
  748. {
  749. return GetSceneDuration( response );
  750. }
  751. break;
  752. case ResponseRules::RESPONSE_RESPONSE:
  753. {
  754. // This should have been recursively resolved already
  755. Assert( 0 );
  756. }
  757. break;
  758. case ResponseRules::RESPONSE_PRINT:
  759. {
  760. return 1.0;
  761. }
  762. break;
  763. default:
  764. case ResponseRules::RESPONSE_NONE:
  765. case ResponseRules::RESPONSE_ENTITYIO:
  766. return 0.0f;
  767. }
  768. return 0.0f;
  769. }
  770. //-----------------------------------------------------------------------------
  771. // Purpose: Placeholder for rules based response system
  772. // Input : concept -
  773. // Output : Returns true on success, false on failure.
  774. //-----------------------------------------------------------------------------
  775. bool CAI_Expresser::Speak( AIConcept_t &concept, const char *modifiers /*= NULL*/, char *pszOutResponseChosen /* = NULL*/, size_t bufsize /* = 0 */, IRecipientFilter *filter /* = NULL */ )
  776. {
  777. concept.SetSpeaker( GetOuter() );
  778. AI_CriteriaSet criteria;
  779. GatherCriteria(&criteria, concept, modifiers);
  780. return Speak( concept, &criteria, pszOutResponseChosen, bufsize, filter );
  781. }
  782. //-----------------------------------------------------------------------------
  783. // Purpose:
  784. //-----------------------------------------------------------------------------
  785. bool CAI_Expresser::Speak( AIConcept_t &concept, AI_CriteriaSet * RESTRICT criteria, char *pszOutResponseChosen , size_t bufsize , IRecipientFilter *filter )
  786. {
  787. VPROF("CAI_Expresser::Speak");
  788. if ( IsSpeechGloballySuppressed() )
  789. {
  790. return false;
  791. }
  792. GetOuter()->ModifyOrAppendDerivedCriteria(*criteria);
  793. AI_Response result;
  794. if ( !FindResponse( result, concept, criteria ) )
  795. {
  796. return false;
  797. }
  798. SpeechMsg( GetOuter(), "%s (%x) spoke %s (%f)", STRING(GetOuter()->GetEntityName()), GetOuter(), (const char*)concept, gpGlobals->curtime );
  799. // Msg( "%s:%s to %s:%s\n", GetOuter()->GetDebugName(), concept.GetStringConcept(), criteria.GetValue(criteria.FindCriterionIndex("Subject")), pTarget ? pTarget->GetDebugName() : "none" );
  800. bool spoke = SpeakDispatchResponse( concept, &result, criteria, filter );
  801. if ( pszOutResponseChosen )
  802. {
  803. result.GetResponse( pszOutResponseChosen, bufsize );
  804. }
  805. return spoke;
  806. }
  807. //-----------------------------------------------------------------------------
  808. // Purpose:
  809. //-----------------------------------------------------------------------------
  810. bool CAI_Expresser::SpeakRawScene( const char *pszScene, float delay, AI_Response *response, IRecipientFilter *filter /* = NULL */ )
  811. {
  812. float sceneLength = GetOuter()->PlayScene( pszScene, delay, response, filter );
  813. if ( sceneLength > 0 )
  814. {
  815. SpeechMsg( GetOuter(), "SpeakRawScene( %s, %f) %f\n", pszScene, delay, sceneLength );
  816. #if defined( HL2_EPISODIC ) || defined( TF_DLL ) || defined( TERROR )
  817. char szInstanceFilename[256];
  818. GetOuter()->GenderExpandString( pszScene, szInstanceFilename, sizeof( szInstanceFilename ) );
  819. // Only mark ourselves as speaking if the scene has speech
  820. if ( GetSceneSpeechCount(szInstanceFilename) > 0 )
  821. {
  822. NoteSpeaking( sceneLength, delay );
  823. }
  824. #else
  825. NoteSpeaking( sceneLength, delay );
  826. #endif
  827. return true;
  828. }
  829. return false;
  830. }
  831. // This will create a fake .vcd/CChoreoScene to wrap the sound to be played
  832. bool CAI_Expresser::SpeakAutoGeneratedScene( char const *soundname, float delay )
  833. {
  834. float speakTime = GetOuter()->PlayAutoGeneratedSoundScene( soundname );
  835. if ( speakTime > 0 )
  836. {
  837. SpeechMsg( GetOuter(), "SpeakAutoGeneratedScene( %s, %f) %f\n", soundname, delay, speakTime );
  838. NoteSpeaking( speakTime, delay );
  839. return true;
  840. }
  841. return false;
  842. }
  843. //-------------------------------------
  844. int CAI_Expresser::SpeakRawSentence( const char *pszSentence, float delay, float volume, soundlevel_t soundlevel, CBaseEntity *pListener )
  845. {
  846. int sentenceIndex = -1;
  847. if ( !pszSentence )
  848. return sentenceIndex;
  849. if ( pszSentence[0] == AI_SP_SPECIFIC_SENTENCE )
  850. {
  851. sentenceIndex = SENTENCEG_Lookup( pszSentence );
  852. if( sentenceIndex == -1 )
  853. {
  854. // sentence not found
  855. return -1;
  856. }
  857. CPASAttenuationFilter filter( GetOuter(), soundlevel );
  858. CBaseEntity::EmitSentenceByIndex( filter, GetOuter()->entindex(), CHAN_VOICE, sentenceIndex, volume, soundlevel, 0, GetVoicePitch());
  859. }
  860. else
  861. {
  862. sentenceIndex = SENTENCEG_PlayRndSz( GetOuter()->NetworkProp()->edict(), pszSentence, volume, soundlevel, 0, GetVoicePitch() );
  863. }
  864. SpeechMsg( GetOuter(), "SpeakRawSentence( %s, %f) %f\n", pszSentence, delay, engine->SentenceLength( sentenceIndex ) );
  865. NoteSpeaking( engine->SentenceLength( sentenceIndex ), delay );
  866. return sentenceIndex;
  867. }
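// Sentence names beginning with AI_SP_SPECIFIC_SENTENCE are looked up and emitted directly;
// any other name is treated as a sentence group and a random member is played via
// SENTENCEG_PlayRndSz.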
  868. //-------------------------------------
  869. void CAI_Expresser::BlockSpeechUntil( float time )
  870. {
  871. SpeechMsg( GetOuter(), "BlockSpeechUntil(%f) %f\n", time, time - gpGlobals->curtime );
  872. m_flBlockedTalkTime = time;
  873. }
  874. //-------------------------------------
  875. void CAI_Expresser::NoteSpeaking( float duration, float delay )
  876. {
  877. duration += delay;
  878. GetSink()->OnStartSpeaking();
  879. if ( duration <= 0 )
  880. {
  881. // no duration :(
  882. m_flStopTalkTime = gpGlobals->curtime + 3;
  883. duration = 0;
  884. }
  885. else
  886. {
  887. m_flStopTalkTime = gpGlobals->curtime + duration;
  888. }
  889. m_flStopTalkTimeWithoutDelay = m_flStopTalkTime - delay;
  890. SpeechMsg( GetOuter(), "NoteSpeaking( %f, %f ) (stop at %f)\n", duration, delay, m_flStopTalkTime );
  891. if ( GetSink()->UseSemaphore() )
  892. {
  893. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( GetOuter() );
  894. if ( pSemaphore )
  895. {
  896. pSemaphore->Acquire( duration, GetOuter() );
  897. }
  898. }
  899. }
  900. //-------------------------------------
  901. void CAI_Expresser::ForceNotSpeaking( void )
  902. {
  903. if ( IsSpeaking() )
  904. {
  905. m_flStopTalkTime = gpGlobals->curtime;
  906. m_flStopTalkTimeWithoutDelay = gpGlobals->curtime;
  907. CAI_TimedSemaphore *pSemaphore = GetMySpeechSemaphore( GetOuter() );
  908. if ( pSemaphore )
  909. {
  910. if ( pSemaphore->GetOwner() == GetOuter() )
  911. {
  912. pSemaphore->Release();
  913. }
  914. }
  915. }
  916. }
  917. //-------------------------------------
  918. bool CAI_Expresser::IsSpeaking( void )
  919. {
  920. if ( m_flStopTalkTime > gpGlobals->curtime )
  921. SpeechMsg( GetOuter(), "IsSpeaking() %f\n", m_flStopTalkTime - gpGlobals->curtime );
  922. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  923. return true;
  924. return ( m_flStopTalkTime > gpGlobals->curtime );
  925. }
  926. //-------------------------------------
  927. bool CAI_Expresser::CanSpeak()
  928. {
  929. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  930. return false;
  931. float timeOk = MAX( m_flStopTalkTime, m_flBlockedTalkTime );
  932. return ( timeOk <= gpGlobals->curtime );
  933. }
  934. //-----------------------------------------------------------------------------
  935. // Purpose: Returns true if it's ok for this entity to speak after himself.
  936. // The base CanSpeak() includes the default speech delay, and won't
  937. // return true until that delay time has passed after finishing the
  938. // speech. This returns true as soon as the speech finishes.
  939. //-----------------------------------------------------------------------------
  940. bool CAI_Expresser::CanSpeakAfterMyself()
  941. {
  942. if ( m_flLastTimeAcceptedSpeak == gpGlobals->curtime ) // only one speak accepted per think
  943. return false;
  944. float timeOk = MAX( m_flStopTalkTimeWithoutDelay, m_flBlockedTalkTime );
  945. return ( timeOk <= gpGlobals->curtime );
  946. }
  947. //-------------------------------------
  948. bool CAI_Expresser::CanSpeakConcept( AIConcept_t concept )
  949. {
  950. // Not in history?
  951. int iter = m_ConceptHistories.Find( concept );
  952. if ( iter == m_ConceptHistories.InvalidIndex() )
  953. {
  954. return true;
  955. }
  956. ConceptHistory_t *history = &m_ConceptHistories[iter];
  957. Assert( history );
  958. const AI_Response &response = history->m_response;
  959. if ( response.IsEmpty() )
  960. return true;
  961. if ( response.GetSpeakOnce() )
  962. return false;
  963. float respeakDelay = response.GetRespeakDelay();
  964. if ( respeakDelay != 0.0f )
  965. {
  966. if ( history->timeSpoken != -1 && ( gpGlobals->curtime < history->timeSpoken + respeakDelay ) )
  967. return false;
  968. }
  969. return true;
  970. }
  971. //-------------------------------------
  972. bool CAI_Expresser::SpokeConcept( AIConcept_t concept )
  973. {
  974. return GetTimeSpokeConcept( concept ) != -1.f;
  975. }
  976. //-------------------------------------
  977. float CAI_Expresser::GetTimeSpokeConcept( AIConcept_t concept )
  978. {
  979. int iter = m_ConceptHistories.Find( concept );
  980. if ( iter == m_ConceptHistories.InvalidIndex() )
  981. return -1;
  982. ConceptHistory_t *h = &m_ConceptHistories[iter];
  983. return h->timeSpoken;
  984. }
  985. //-------------------------------------
  986. void CAI_Expresser::SetSpokeConcept( AIConcept_t concept, AI_Response *response, bool bCallback )
  987. {
  988. int idx = m_ConceptHistories.Find( concept );
  989. if ( idx == m_ConceptHistories.InvalidIndex() )
  990. {
  991. ConceptHistory_t h;
  992. h.timeSpoken = gpGlobals->curtime;
  993. idx = m_ConceptHistories.Insert( concept, h );
  994. }
  995. ConceptHistory_t *slot = &m_ConceptHistories[ idx ];
  996. slot->timeSpoken = gpGlobals->curtime;
  997. // Update response info
  998. if ( response )
  999. {
  1000. slot->m_response = *response;
  1001. }
  1002. if ( bCallback )
  1003. GetSink()->OnSpokeConcept( concept, response );
  1004. }
  1005. //-------------------------------------
  1006. void CAI_Expresser::ClearSpokeConcept( AIConcept_t concept )
  1007. {
  1008. m_ConceptHistories.Remove( concept );
  1009. }
  1010. //-------------------------------------
  1011. void CAI_Expresser::DumpHistories()
  1012. {
  1013. int c = 1;
  1014. for ( int i = m_ConceptHistories.First(); i != m_ConceptHistories.InvalidIndex(); i = m_ConceptHistories.Next(i ) )
  1015. {
  1016. ConceptHistory_t *h = &m_ConceptHistories[ i ];
  1017. DevMsg( "%i: %s at %f\n", c++, m_ConceptHistories.GetElementName( i ), h->timeSpoken );
  1018. }
  1019. }
  1020. //-------------------------------------
  1021. bool CAI_Expresser::IsValidResponse( ResponseType_t type, const char *pszValue )
  1022. {
  1023. if ( type == ResponseRules::RESPONSE_SCENE )
  1024. {
  1025. char szInstanceFilename[256];
  1026. GetOuter()->GenderExpandString( pszValue, szInstanceFilename, sizeof( szInstanceFilename ) );
  1027. return ( GetSceneDuration( szInstanceFilename ) > 0 );
  1028. }
  1029. return true;
  1030. }
  1031. //-----------------------------------------------------------------------------
  1032. // Purpose:
  1033. //-----------------------------------------------------------------------------
  1034. CAI_TimedSemaphore *CAI_Expresser::GetMySpeechSemaphore( CBaseEntity *pNpc )
  1035. {
  1036. if ( !pNpc->MyNPCPointer() )
  1037. return NULL;
  1038. return (pNpc->MyNPCPointer()->IsPlayerAlly() ? &g_AIFriendliesTalkSemaphore : &g_AIFoesTalkSemaphore );
  1039. }
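// Player-ally NPCs share g_AIFriendliesTalkSemaphore and all other NPCs share
// g_AIFoesTalkSemaphore, so within each group talkers contend for one shared speech slot.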
  1040. //-----------------------------------------------------------------------------
  1041. // Purpose:
  1042. //-----------------------------------------------------------------------------
  1043. void CAI_Expresser::SpeechMsg( CBaseEntity *pFlex, const char *pszFormat, ... )
  1044. {
  1045. if ( !DebuggingSpeech() )
  1046. return;
// format the variadic arguments into a local buffer once, then route the message
char string[ 1024 ];
va_list args;
va_start( args, pszFormat );
V_vsnprintf( string, sizeof( string ), pszFormat, args );
va_end( args );
  1047. if ( pFlex->MyNPCPointer() )
  1048. {
  1049. DevMsg( pFlex->MyNPCPointer(), "%s", string );
  1050. }
  1051. else
  1052. {
  1053. DevMsg( "%s", string );
  1054. }
  1055. UTIL_LogPrintf( "%s", string );
  1056. }
  1057. //-----------------------------------------------------------------------------
  1058. // Purpose: returns true when l4d is in credits screen or some other
  1059. // speech-forbidden state
  1060. //-----------------------------------------------------------------------------
  1061. bool CAI_Expresser::IsSpeechGloballySuppressed()
  1062. {
  1063. return false;
  1064. }
  1065. //-----------------------------------------------------------------------------
  1066. void CAI_ExpresserHost_NPC_DoModifyOrAppendCriteria( CAI_BaseNPC *pSpeaker, AI_CriteriaSet& set )
  1067. {
  1068. // Append current activity name
  1069. const char *pActivityName = pSpeaker->GetActivityName( pSpeaker->GetActivity() );
  1070. if ( pActivityName )
  1071. {
  1072. set.AppendCriteria( "activity", pActivityName );
  1073. }
  1074. static const char *pStateNames[] = { "None", "Idle", "Alert", "Combat", "Scripted", "PlayDead", "Dead" };
  1075. if ( (int)pSpeaker->m_NPCState < ARRAYSIZE(pStateNames) )
  1076. {
  1077. set.AppendCriteria( "npcstate", UTIL_VarArgs( "[NPCState::%s]", pStateNames[pSpeaker->m_NPCState] ) );
  1078. }
  1079. if ( pSpeaker->GetEnemy() )
  1080. {
  1081. set.AppendCriteria( "enemy", pSpeaker->GetEnemy()->GetClassname() );
  1082. set.AppendCriteria( "timesincecombat", "-1" );
  1083. }
  1084. else
  1085. {
  1086. if ( pSpeaker->GetLastEnemyTime() == 0.0 )
  1087. set.AppendCriteria( "timesincecombat", "999999.0" );
  1088. else
  1089. set.AppendCriteria( "timesincecombat", UTIL_VarArgs( "%f", gpGlobals->curtime - pSpeaker->GetLastEnemyTime() ) );
  1090. }
  1091. set.AppendCriteria( "speed", UTIL_VarArgs( "%.3f", pSpeaker->GetSmoothedVelocity().Length() ) );
  1092. CBaseCombatWeapon *weapon = pSpeaker->GetActiveWeapon();
  1093. if ( weapon )
  1094. {
  1095. set.AppendCriteria( "weapon", weapon->GetClassname() );
  1096. }
  1097. else
  1098. {
  1099. set.AppendCriteria( "weapon", "none" );
  1100. }
  1101. CBasePlayer *pPlayer = AI_GetSinglePlayer();
  1102. if ( pPlayer )
  1103. {
  1104. Vector distance = pPlayer->GetAbsOrigin() - pSpeaker->GetAbsOrigin();
  1105. set.AppendCriteria( "distancetoplayer", UTIL_VarArgs( "%f", distance.Length() ) );
  1106. }
  1107. else
  1108. {
  1109. set.AppendCriteria( "distancetoplayer", UTIL_VarArgs( "%i", MAX_COORD_RANGE ) );
  1110. }
  1111. if ( pSpeaker->HasCondition( COND_SEE_PLAYER ) )
  1112. {
  1113. set.AppendCriteria( "seeplayer", "1" );
  1114. }
  1115. else
  1116. {
  1117. set.AppendCriteria( "seeplayer", "0" );
  1118. }
  1119. if ( pPlayer && pPlayer->FInViewCone( pSpeaker ) && pPlayer->FVisible( pSpeaker ) )
  1120. {
  1121. set.AppendCriteria( "seenbyplayer", "1" );
  1122. }
  1123. else
  1124. {
  1125. set.AppendCriteria( "seenbyplayer", "0" );
  1126. }
  1127. }
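// Criteria appended above: activity, npcstate, enemy, timesincecombat, speed, weapon,
// distancetoplayer, seeplayer and seenbyplayer.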
  1128. //-----------------------------------------------------------------------------
  1129. #if !defined( CSTRIKE_DLL )
  1130. CON_COMMAND( npc_speakall, "Force the npc to try and speak all their responses" )
  1131. {
  1132. if ( !UTIL_IsCommandIssuedByServerAdmin() )
  1133. return;
  1134. CBaseEntity *pEntity;
  1135. if ( args[1] && *args[1] )
  1136. {
  1137. pEntity = gEntList.FindEntityByName( NULL, args[1], NULL );
  1138. if ( !pEntity )
  1139. {
  1140. pEntity = gEntList.FindEntityByClassname( NULL, args[1] );
  1141. }
  1142. }
  1143. else
  1144. {
  1145. pEntity = UTIL_GetCommandClient() ? UTIL_GetCommandClient()->FindPickerEntity() : NULL;
  1146. }
  1147. if ( pEntity )
  1148. {
  1149. CAI_BaseNPC *pNPC = pEntity->MyNPCPointer();
  1150. if (pNPC)
  1151. {
  1152. if ( pNPC->GetExpresser() )
  1153. {
  1154. bool save = engine->LockNetworkStringTables( false );
  1155. pNPC->GetExpresser()->TestAllResponses();
  1156. engine->LockNetworkStringTables( save );
  1157. }
  1158. }
  1159. }
  1160. }
  1161. #endif
  1162. //-----------------------------------------------------------------------------
  1163. CMultiplayer_Expresser::CMultiplayer_Expresser( CBaseFlex *pOuter ) : CAI_ExpresserWithFollowup( pOuter )
  1164. {
  1165. m_bAllowMultipleScenes = false;
  1166. }
  1167. bool CMultiplayer_Expresser::IsSpeaking( void )
  1168. {
  1169. if ( m_bAllowMultipleScenes )
  1170. {
  1171. return false;
  1172. }
  1173. return CAI_Expresser::IsSpeaking();
  1174. }
  1175. void CMultiplayer_Expresser::AllowMultipleScenes()
  1176. {
  1177. m_bAllowMultipleScenes = true;
  1178. }
  1179. void CMultiplayer_Expresser::DisallowMultipleScenes()
  1180. {
  1181. m_bAllowMultipleScenes = false;
  1182. }