/*
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef LOG_TAG
#define LOG_TAG "AudioFocusParser"
#endif

#include "audio_focus_parser.h"
#ifdef USE_CONFIG_POLICY
#include "config_policy_utils.h"
#endif

#include "media_monitor_manager.h"

namespace OHOS {
namespace AudioStandard {

// Initialize stream map with string vs AudioFocusType
std::map<std::string, AudioFocusType> AudioFocusParser::audioFocusMap = {
    // stream type for audio interrupt
    {"STREAM_VOICE_CALL",
        {AudioStreamType::STREAM_VOICE_CALL, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_VOICE_CALL_ASSISTANT",
        {AudioStreamType::STREAM_VOICE_CALL_ASSISTANT, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_VOICE_MESSAGE",
        {AudioStreamType::STREAM_VOICE_MESSAGE, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_SYSTEM",
        {AudioStreamType::STREAM_SYSTEM, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_RING",
        {AudioStreamType::STREAM_RING, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_MUSIC",
        {AudioStreamType::STREAM_MUSIC, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_MOVIE",
        {AudioStreamType::STREAM_MOVIE, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_GAME",
        {AudioStreamType::STREAM_GAME, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_SPEECH",
        {AudioStreamType::STREAM_SPEECH, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_NAVIGATION",
        {AudioStreamType::STREAM_NAVIGATION, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_ALARM",
        {AudioStreamType::STREAM_ALARM, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_NOTIFICATION",
        {AudioStreamType::STREAM_NOTIFICATION, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_SYSTEM_ENFORCED",
        {AudioStreamType::STREAM_SYSTEM_ENFORCED, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_DTMF",
        {AudioStreamType::STREAM_DTMF, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_VOICE_ASSISTANT",
        {AudioStreamType::STREAM_VOICE_ASSISTANT, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_ACCESSIBILITY",
        {AudioStreamType::STREAM_ACCESSIBILITY, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_ULTRASONIC",
        {AudioStreamType::STREAM_ULTRASONIC, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_INTERNAL_FORCE_STOP",
        {AudioStreamType::STREAM_INTERNAL_FORCE_STOP, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_VOICE_COMMUNICATION",
        {AudioStreamType::STREAM_VOICE_COMMUNICATION, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_VOICE_RING",
        {AudioStreamType::STREAM_VOICE_RING, SourceType::SOURCE_TYPE_INVALID, true}},
    {"STREAM_CAMCORDER",
        {AudioStreamType::STREAM_CAMCORDER, SourceType::SOURCE_TYPE_INVALID, true}},
    // source type for audio interrupt
    {"SOURCE_TYPE_MIC",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_MIC, false}},
    {"SOURCE_TYPE_CAMCORDER",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_CAMCORDER, false}},
    {"SOURCE_TYPE_VOICE_RECOGNITION",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_VOICE_RECOGNITION, false}},
    {"SOURCE_TYPE_WAKEUP",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_WAKEUP, false}},
    {"SOURCE_TYPE_VOICE_COMMUNICATION",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_VOICE_COMMUNICATION, false}},
    {"SOURCE_TYPE_ULTRASONIC",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_ULTRASONIC, false}},
    {"SOURCE_TYPE_PLAYBACK_CAPTURE",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_PLAYBACK_CAPTURE, false}},
    {"SOURCE_TYPE_VOICE_CALL",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_VOICE_CALL, false}},
    {"SOURCE_TYPE_VOICE_MESSAGE",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_VOICE_MESSAGE, false}},
    {"SOURCE_TYPE_REMOTE_CAST",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_REMOTE_CAST, false}},
    {"SOURCE_TYPE_VOICE_TRANSCRIPTION",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_VOICE_TRANSCRIPTION, false}},
    {"SOURCE_TYPE_UNPROCESSED",
        {AudioStreamType::STREAM_DEFAULT, SourceType::SOURCE_TYPE_UNPROCESSED, false}},
};

// Initialize action map with string vs InterruptHint
std::map<std::string, InterruptHint> AudioFocusParser::actionMap = {
    {"DUCK", INTERRUPT_HINT_DUCK},
    {"PAUSE", INTERRUPT_HINT_PAUSE},
    {"REJECT", INTERRUPT_HINT_STOP},
    {"STOP", INTERRUPT_HINT_STOP},
    {"PLAY", INTERRUPT_HINT_NONE}
};

// Initialize target map with string vs ActionTarget
std::map<std::string, ActionTarget> AudioFocusParser::targetMap = {
    {"incoming", INCOMING},
    {"existing", CURRENT},
    {"both", BOTH},
};

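// Initialize force map with string vs InterruptForceType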
std::map<std::string, InterruptForceType> AudioFocusParser::forceMap = {
    {"true", INTERRUPT_FORCE},
    {"false", INTERRUPT_SHARE},
};

AudioFocusParser::AudioFocusParser()
{
    AUDIO_DEBUG_LOG("AudioFocusParser ctor");
}

AudioFocusParser::~AudioFocusParser()
{
    AUDIO_DEBUG_LOG("AudioFocusParser dtor");
}

void AudioFocusParser::LoadDefaultConfig(std::map<std::pair<AudioFocusType, AudioFocusType>,
    AudioFocusEntry> &focusMap)
{
}

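/*
 * Illustrative layout of the focus policy file consumed by LoadConfig below. The element and
 * attribute names are taken from the parsing code in this file; the name of the allowed-entries
 * wrapper is shown as <allow> only as an assumption, since any element other than <deny> inside
 * <focus_table> is parsed as the allowed list.
 *
 * <audio_focus_policy>
 *     <focus_type value="STREAM_MUSIC">
 *         <focus_table>
 *             <deny>
 *                 <focus_type value="SOURCE_TYPE_PLAYBACK_CAPTURE"/>
 *             </deny>
 *             <allow>
 *                 <focus_type value="STREAM_RING" action_type="DUCK" action_on="existing" is_forced="true"/>
 *             </allow>
 *         </focus_table>
 *     </focus_type>
 * </audio_focus_policy>
 */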
int32_t AudioFocusParser::LoadConfig(std::map<std::pair<AudioFocusType, AudioFocusType>,
    AudioFocusEntry> &focusMap)
{
    xmlDoc *doc = nullptr;
    xmlNode *rootElement = nullptr;
#ifdef USE_CONFIG_POLICY
    char buf[MAX_PATH_LEN];
    char *path = GetOneCfgFile(AUDIO_FOCUS_CONFIG_FILE, buf, MAX_PATH_LEN);
#else
    const char *path = AUDIO_FOCUS_CONFIG_FILE;
#endif
    if (path != nullptr && *path != '\0') {
        doc = xmlReadFile(path, nullptr, 0);
    }
    if (doc == nullptr) {
        AUDIO_ERR_LOG("error: could not parse audio_interrupt_policy_config.xml");
        LoadDefaultConfig(focusMap);
        WriteConfigErrorEvent();
        return ERROR;
    }
    rootElement = xmlDocGetRootElement(doc);
    xmlNode *currNode = rootElement;
    if (currNode == nullptr) {
        // Free the document on this early return to avoid leaking it.
        AUDIO_ERR_LOG("root element is null");
        xmlFreeDoc(doc);
        return ERROR;
    }
    if (xmlStrcmp(currNode->name, reinterpret_cast<const xmlChar*>("audio_focus_policy"))) {
        AUDIO_ERR_LOG("Missing tag - audio_focus_policy in: %s", AUDIO_FOCUS_CONFIG_FILE);
        WriteConfigErrorEvent();
        xmlFreeDoc(doc);
        return ERROR;
    }
    if (currNode->children) {
        currNode = currNode->children;
    } else {
        AUDIO_ERR_LOG("Missing child: %s", AUDIO_FOCUS_CONFIG_FILE);
        xmlFreeDoc(doc);
        return ERROR;
    }
    while (currNode != nullptr) {
        if ((currNode->type == XML_ELEMENT_NODE) &&
            (!xmlStrcmp(currNode->name, reinterpret_cast<const xmlChar*>("focus_type")))) {
            ParseStreams(currNode, focusMap);
            break;
        } else {
            currNode = currNode->next;
        }
    }
    xmlFreeDoc(doc);
    return SUCCESS;
}

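// Report a LOAD_CONFIG_ERROR fault event for the interrupt policy config through media monitor.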
void AudioFocusParser::WriteConfigErrorEvent()
{
    std::shared_ptr<Media::MediaMonitor::EventBean> bean = std::make_shared<Media::MediaMonitor::EventBean>(
        Media::MediaMonitor::AUDIO, Media::MediaMonitor::LOAD_CONFIG_ERROR, Media::MediaMonitor::FAULT_EVENT);
    bean->Add("CATEGORY", Media::MediaMonitor::AUDIO_INTERRUPT_POLICY_CONFIG);
    Media::MediaMonitor::MediaMonitorManager::GetInstance().WriteLogMsg(bean);
}

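// Dispatch the children of <focus_table>: a <deny> element holds rejected streams,
// any other element holds allowed streams.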
void AudioFocusParser::ParseFocusChildrenMap(xmlNode *node, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    xmlNode *sNode = node;
    while (sNode) {
        if (sNode->type == XML_ELEMENT_NODE) {
            if (!xmlStrcmp(sNode->name, reinterpret_cast<const xmlChar*>("deny"))) {
                ParseRejectedStreams(sNode->children, curStream, focusMap);
            } else {
                ParseAllowedStreams(sNode->children, curStream, focusMap);
            }
        }
        sNode = sNode->next;
    }
}

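// Scan the children of a top-level <focus_type> element for its <focus_table> node.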
void AudioFocusParser::ParseFocusMap(xmlNode *node, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    xmlNode *currNode = node;
    while (currNode != nullptr) {
        if (currNode->type == XML_ELEMENT_NODE) {
            if (!xmlStrcmp(currNode->name, reinterpret_cast<const xmlChar*>("focus_table"))) {
                AUDIO_DEBUG_LOG("node type: Element, name: %s", currNode->name);
                ParseFocusChildrenMap(currNode->children, curStream, focusMap);
            }
        }
        currNode = currNode->next;
    }
}

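// Walk the top-level <focus_type value="..."> elements; each recognized stream or source name
// becomes the existing (first) member of the focus pairs built from its focus table.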
void AudioFocusParser::ParseStreams(xmlNode *node,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    xmlNode *currNode = node;
    while (currNode) {
        if (currNode->type == XML_ELEMENT_NODE) {
            char *sType = reinterpret_cast<char*>(xmlGetProp(currNode,
                reinterpret_cast<xmlChar*>(const_cast<char*>("value"))));
            if (sType == nullptr) {
                // xmlGetProp returns nullptr when the "value" attribute is missing; skip such nodes.
                currNode = currNode->next;
                continue;
            }
            std::string typeStr(sType);
            std::map<std::string, AudioFocusType>::iterator it = audioFocusMap.find(typeStr);
            if (it != audioFocusMap.end()) {
                AUDIO_DEBUG_LOG("stream type: %{public}s", sType);
                ParseFocusMap(currNode->children, typeStr, focusMap);
            }
            xmlFree(sType);
        }
        currNode = currNode->next;
    }
}

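// A denied incoming stream is always recorded as a forced INTERRUPT_HINT_STOP acting on the incoming side.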
void AudioFocusParser::AddRejectedFocusEntry(xmlNode *currNode, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    char *newStream = reinterpret_cast<char*>(xmlGetProp(currNode,
        reinterpret_cast<xmlChar*>(const_cast<char*>("value"))));
    if (newStream == nullptr) {
        // xmlGetProp returns nullptr when the "value" attribute is missing.
        AUDIO_ERR_LOG("rejected focus entry has no value attribute");
        return;
    }
    std::string newStreamStr(newStream);
    std::map<std::string, AudioFocusType>::iterator it1 = audioFocusMap.find(newStreamStr);
    if (it1 != audioFocusMap.end()) {
        std::pair<AudioFocusType, AudioFocusType> rejectedStreamsPair =
            std::make_pair(audioFocusMap[curStream], audioFocusMap[newStreamStr]);
        AudioFocusEntry rejectedFocusEntry;
        rejectedFocusEntry.actionOn = INCOMING;
        rejectedFocusEntry.hintType = INTERRUPT_HINT_STOP;
        rejectedFocusEntry.forceType = INTERRUPT_FORCE;
        rejectedFocusEntry.isReject = true;
        focusMap.emplace(rejectedStreamsPair, rejectedFocusEntry);

        AUDIO_DEBUG_LOG("current stream: %s, incoming stream: %s", curStream.c_str(), newStreamStr.c_str());
        AUDIO_DEBUG_LOG("actionOn: %d, hintType: %d, forceType: %d isReject: %d",
            rejectedFocusEntry.actionOn, rejectedFocusEntry.hintType,
            rejectedFocusEntry.forceType, rejectedFocusEntry.isReject);
    }
    xmlFree(newStream);
}

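// Each <focus_type> child under <deny> yields one rejected focus entry for the current stream.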
void AudioFocusParser::ParseRejectedStreams(xmlNode *node, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    xmlNode *currNode = node;

    while (currNode) {
        if (currNode->type == XML_ELEMENT_NODE) {
            if (!xmlStrcmp(currNode->name, reinterpret_cast<const xmlChar*>("focus_type"))) {
                AddRejectedFocusEntry(currNode, curStream, focusMap);
            }
        }
        currNode = currNode->next;
    }
}

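// An allowed entry is added only when value, action_type, action_on and is_forced all resolve
// through audioFocusMap, actionMap, targetMap and forceMap respectively.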
void AudioFocusParser::AddAllowedFocusEntry(xmlNode *currNode, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    char *newStream = reinterpret_cast<char*>(xmlGetProp(currNode,
        reinterpret_cast<xmlChar*>(const_cast<char*>("value"))));
    char *aType = reinterpret_cast<char*>(xmlGetProp(currNode,
        reinterpret_cast<xmlChar*>(const_cast<char*>("action_type"))));
    char *aTarget = reinterpret_cast<char*>(xmlGetProp(currNode,
        reinterpret_cast<xmlChar*>(const_cast<char*>("action_on"))));
    char *isForced = reinterpret_cast<char*>(xmlGetProp(currNode,
        reinterpret_cast<xmlChar*>(const_cast<char*>("is_forced"))));

    if (newStream == nullptr || aType == nullptr || aTarget == nullptr || isForced == nullptr) {
        // xmlGetProp returns nullptr for any missing attribute; skip malformed entries
        // and release whatever attributes were successfully read.
        AUDIO_ERR_LOG("allowed focus entry is missing one or more attributes");
        if (newStream != nullptr) { xmlFree(newStream); }
        if (aType != nullptr) { xmlFree(aType); }
        if (aTarget != nullptr) { xmlFree(aTarget); }
        if (isForced != nullptr) { xmlFree(isForced); }
        return;
    }
    std::string newStreamStr(newStream);
    std::map<std::string, AudioFocusType>::iterator it1 = audioFocusMap.find(newStreamStr);
    std::string aTargetStr(aTarget);
    std::map<std::string, ActionTarget>::iterator it2 = targetMap.find(aTargetStr);
    std::string aTypeStr(aType);
    std::map<std::string, InterruptHint>::iterator it3 = actionMap.find(aTypeStr);
    std::string isForcedStr(isForced);
    std::map<std::string, InterruptForceType>::iterator it4 = forceMap.find(isForcedStr);
    if ((it1 != audioFocusMap.end()) && (it2 != targetMap.end()) && (it3 != actionMap.end()) &&
        (it4 != forceMap.end())) {
        std::pair<AudioFocusType, AudioFocusType> allowedStreamsPair =
            std::make_pair(audioFocusMap[curStream], audioFocusMap[newStreamStr]);
        AudioFocusEntry allowedFocusEntry;
        allowedFocusEntry.actionOn = targetMap[aTargetStr];
        allowedFocusEntry.hintType = actionMap[aTypeStr];
        allowedFocusEntry.forceType = forceMap[isForcedStr];
        allowedFocusEntry.isReject = false;
        focusMap.emplace(allowedStreamsPair, allowedFocusEntry);

        AUDIO_DEBUG_LOG("current stream: %s, incoming stream: %s", curStream.c_str(), newStreamStr.c_str());
        AUDIO_DEBUG_LOG("actionOn: %d, hintType: %d, forceType: %d isReject: %d",
            allowedFocusEntry.actionOn, allowedFocusEntry.hintType,
            allowedFocusEntry.forceType, allowedFocusEntry.isReject);
    }
    xmlFree(newStream);
    xmlFree(aType);
    xmlFree(aTarget);
    xmlFree(isForced);
}

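// Each <focus_type> child under the allowed section yields one allowed focus entry for the current stream.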
void AudioFocusParser::ParseAllowedStreams(xmlNode *node, const std::string &curStream,
    std::map<std::pair<AudioFocusType, AudioFocusType>, AudioFocusEntry> &focusMap)
{
    xmlNode *currNode = node;

    while (currNode) {
        if (currNode->type == XML_ELEMENT_NODE) {
            if (!xmlStrcmp(currNode->name, reinterpret_cast<const xmlChar*>("focus_type"))) {
                AddAllowedFocusEntry(currNode, curStream, focusMap);
            }
        }
        currNode = currNode->next;
    }
}
} // namespace AudioStandard
} // namespace OHOS